{
  "format_version": 3,
  "claim_natural": "Consider a spike-train encoding model where spikes are generated by an inhomogeneous Poisson process with intensity lambda_t = f(eta_t), eta_t = x_t^T beta + h_t^T gamma + b, with convex parameter space for theta = (beta, gamma, b). If f is positive, convex, and log-concave, then the log-likelihood is concave in theta. Therefore every local maximum is global, ML fitting is a convex optimization problem, and the same holds for MAP inference under any log-concave prior on theta.",
  "claim_formal": {
    "subject": "Poisson spike-train log-likelihood with link function f",
    "property": "concavity of log-likelihood in theta when f is positive, convex, and log-concave",
    "operator": "==",
    "threshold": true,
    "operator_note": "The claim is a mathematical theorem asserting concavity of a composite functional form. We verify: (1) the log-likelihood decomposes into a sum of terms each of which is concave in theta, via standard composition rules of convex analysis; (2) numerical Hessian checks on a concrete example (f = exp) corroborate negative semidefiniteness for that instance (supporting evidence; the general statement rests on step 1); (3) the MAP extension follows by the sum-of-concave-is-concave rule applied to the log-likelihood plus a log-concave log-prior. The claim is TRUE iff all sub-steps hold."
  },
  "evidence": {
    "A1": {
      "type": "computed",
      "label": "Composition: concave(affine) is concave",
      "sub_claim": null,
      "method": "Convex analysis composition rule applied to log f and affine eta_t(theta)",
      "result": "True",
      "depends_on": []
    },
    "A2": {
      "type": "computed",
      "label": "Convex(affine) is convex => -f(affine) concave",
      "sub_claim": null,
      "method": "Convex analysis composition rule applied to f and affine eta_t(theta)",
      "result": "True",
      "depends_on": []
    },
    "A3": {
      "type": "computed",
      "label": "Non-negative scalar times concave is concave",
      "sub_claim": null,
      "method": "Scaling rule: n_t >= 0 preserves concavity of log f(eta_t)",
      "result": "True",
      "depends_on": []
    },
    "A4": {
      "type": "computed",
      "label": "Log-likelihood is sum of concave terms => concave",
      "sub_claim": null,
      "method": "Summation of concave terms from A1-A3 over time bins",
      "result": "True",
      "depends_on": [
        "A1",
        "A2",
        "A3"
      ]
    },
    "A5": {
      "type": "computed",
      "label": "Numerical Hessian eigenvalues all <= 0 (f=exp)",
      "sub_claim": null,
      "method": "Finite-difference Hessian at 5 random points; max eigenvalue -6.71e-02",
      "result": "True",
      "depends_on": []
    },
    "A6": {
      "type": "computed",
      "label": "MAP multi-start optimization converges to unique optimum",
      "sub_claim": null,
      "method": "10 random starts with Gaussian prior; spread 1.17e-10",
      "result": "True",
      "depends_on": [
        "A4"
      ]
    },
    "A7": {
      "type": "computed",
      "label": "ML multi-start optimization converges to unique optimum",
      "sub_claim": null,
      "method": "10 random starts; log-likelihood spread 2.80e-06",
      "result": "True",
      "depends_on": [
        "A4"
      ]
    }
  },
  "cross_checks": [
    {
      "description": "Analytical vs numerical Hessian for f=exp: agreement means both methods report a negative maximum eigenvalue (consistent with negative semidefiniteness); the eigenvalues themselves are not expected to match exactly",
      "fact_ids": [
        "A4",
        "A5"
      ],
      "agreement": true,
      "values_compared": [
        "Analytical: max eig = -4.19e-02",
        "Numerical: max eig = -6.71e-02"
      ]
    },
    {
      "description": "Multi-start ML optimization (A7) independently corroborates the Hessian NSD check (A5): two unrelated methods both indicate a single optimum",
      "fact_ids": [
        "A5",
        "A7"
      ],
      "agreement": true,
      "values_compared": [
        "Hessian NSD: True",
        "Unique optimum: True"
      ]
    }
  ],
  "adversarial_checks": [
    {
      "question": "Is 'positive, convex, and log-concave' a vacuous set of conditions? Do common link functions actually satisfy all three simultaneously?",
      "verification_performed": "Checked exp(x): positive (exp(x)>0), convex (exp''=exp>0), log-concave (log(exp(x))=x is concave/linear). Also checked f(x)=x for x>0: positive, convex, log f = log x is concave. Softplus log(1+exp(x)): positive, convex, and log-concave (can be shown by checking second derivative of log(softplus(x))). Multiple common link functions satisfy all three conditions.",
      "finding": "The conditions are not vacuous. The exponential, identity-on-positive-reals, and softplus functions all satisfy positivity, convexity, and log-concavity.",
      "breaks_proof": false
    },
    {
      "question": "Could the proof break for non-compact or unbounded parameter spaces? The claim assumes convex parameter space \u2014 does concavity still guarantee a unique global maximum exists?",
      "verification_performed": "Concavity guarantees that every local maximum is global, but does not guarantee existence of a maximum (the supremum might not be attained). For f = exp, the log-likelihood can go to -\u221e as ||\u03b8|| \u2192 \u221e (since the -exp(\u03b7_t)\u0394t term dominates), ensuring a maximum exists. For general f, existence requires additional compactness or coercivity conditions. The claim as stated says 'every local maximum is global' and 'ML fitting is a convex optimization problem' \u2014 both are true by concavity alone. Existence is a separate (unstated) concern.",
      "finding": "The claim is about concavity and the local=global property, not existence. Concavity does guarantee local=global. Existence of the MLE may require additional conditions but is not part of the stated claim.",
      "breaks_proof": false
    },
    {
      "question": "Is the claim that 'every local maximum is global' actually a theorem for concave functions, or does it require strict concavity?",
      "verification_performed": "Standard result in convex analysis: if f is concave and x* is a local maximum, then for any y and small t>0, f(x* + t(y-x*)) \u2264 f(x*). By concavity, f(x* + t(y-x*)) \u2265 tf(y) + (1-t)f(x*), so tf(y) + (1-t)f(x*) \u2264 f(x*), giving f(y) \u2264 f(x*). This holds for any y, so x* is a global maximum. Strict concavity would additionally give uniqueness.",
      "finding": "Confirmed: every local maximum of a concave function is global. This does NOT require strict concavity. Strict concavity gives uniqueness, which the claim does not assert.",
      "breaks_proof": false
    }
  ],
  "verdict": {
    "value": "PROVED",
    "qualified": false,
    "qualifier": null,
    "reason": null
  },
  "key_results": {
    "all_sub_claims_hold": true,
    "hessian_max_eigenvalue": -0.06711474043145206,
    "ml_multistart_spread": 2.797036581767293e-06,
    "map_multistart_spread": 1.1728398807697715e-10,
    "analytical_hessian_max_eig": -0.041917791557041076,
    "claim_holds": true
  },
  "generator": {
    "name": "proof-engine",
    "version": "1.23.0",
    "repo": "https://github.com/yaniv-golan/proof-engine",
    "generated_at": "2026-04-18"
  },
  "proof_py_url": "/proofs/poisson-spike-train-loglik-concave/proof.py",
  "citation": {
    "doi": "10.5281/zenodo.19645244",
    "concept_doi": "10.5281/zenodo.19645243",
    "url": "https://proofengine.info/proofs/poisson-spike-train-loglik-concave/",
    "author": "Proof Engine",
    "cite_bib_url": "/proofs/poisson-spike-train-loglik-concave/cite.bib",
    "cite_ris_url": "/proofs/poisson-spike-train-loglik-concave/cite.ris"
  },
  "depends_on": []
}