You are currently on a failover version of the Materials Cloud Archive hosted at CINECA, Italy.
Click here to access the main Materials Cloud Archive.
Note: If the link above redirects you to this page, it means that the Archive is currently offline due to maintenance. We will be back online as soon as possible.
This version is read-only: you can view published records and download files, but you cannot create new records or make changes to existing ones.

A prediction rigidity formalism for low-cost uncertainties in trained neural networks


JSON Export

{
  "updated": "2024-10-17T08:11:55.637269+00:00", 
  "id": "2399", 
  "metadata": {
    "publication_date": "Oct 17, 2024, 10:11:55", 
    "edited_by": 576, 
    "doi": "10.24435/materialscloud:5r-rf", 
    "references": [
      {
        "type": "Preprint", 
        "url": "https://arxiv.org/abs/2403.02251", 
        "citation": "F. Bigi, S. Chong, M. Ceriotti, F. Grasselli, https://arxiv.org/abs/2403.02251", 
        "doi": "10.1088/2632-2153/ad805f", 
        "comment": "Paper in which the method is described"
      }
    ], 
    "description": "Quantifying the uncertainty of regression models is essential to ensure their reliability, particularly since their application often extends beyond their training domain. Based on the solution of a constrained optimization problem, this work proposes \u2018prediction rigidities\u2019 as a formalism to obtain uncertainties of arbitrary pre-trained regressors. A clear connection between the suggested framework and Bayesian inference is established, and a last-layer approximation is developed and rigorously justified to enable the application of the method to neural networks. This extension affords cheap uncertainties without any modification to the neural network itself or its training procedure. The effectiveness of this approach is shown for a wide range of regression tasks, ranging from simple toy models to applications in chemistry and meteorology.\nThis record includes computational experiments supporting the MLST paper titled \"A prediction rigidity formalism for low-cost uncertainties in trained neural networks\".", 
    "contributors": [
      {
        "email": "filippo.bigi@epfl.ch", 
        "affiliations": [
          "COSMO, Institut des Mat\u00e9riaux, \u00c9cole Polytechnique F\u00e9d\u00e9rale de Lausanne (EPFL), CH-1015 Lausanne, Vaud, Switzerland"
        ], 
        "givennames": "Filippo", 
        "familyname": "Bigi"
      }, 
      {
        "email": "sanggyu.chong@epfl.ch", 
        "affiliations": [
          "COSMO, Institut des Mat\u00e9riaux, \u00c9cole Polytechnique F\u00e9d\u00e9rale de Lausanne (EPFL), CH-1015 Lausanne, Vaud, Switzerland"
        ], 
        "givennames": "Sanggyu", 
        "familyname": "Chong"
      }, 
      {
        "email": "michele.ceriotti@epfl.ch", 
        "affiliations": [
          "COSMO, Institut des Mat\u00e9riaux, \u00c9cole Polytechnique F\u00e9d\u00e9rale de Lausanne (EPFL), CH-1015 Lausanne, Vaud, Switzerland"
        ], 
        "givennames": "Michele", 
        "familyname": "Ceriotti"
      }, 
      {
        "email": "fede.grasselli@gmail.com", 
        "affiliations": [
          "COSMO, Institut des Mat\u00e9riaux, \u00c9cole Polytechnique F\u00e9d\u00e9rale de Lausanne (EPFL), CH-1015 Lausanne, Vaud, Switzerland"
        ], 
        "givennames": "Federico", 
        "familyname": "Grasselli"
      }
    ], 
    "is_last": true, 
    "license_addendum": null, 
    "_files": [
      {
        "description": "The zip file contains (1) an implementation of the proposed method and (2) all experiments associated with the manuscript. The archive can be navigated thanks to the readme files in each subfolder.", 
        "checksum": "md5:70c0fb1fb3bce6f9147a65890f27ec7f", 
        "size": 523247587, 
        "key": "llpr.zip"
      }
    ], 
    "version": 1, 
    "id": "2399", 
    "conceptrecid": "2398", 
    "owner": 1528, 
    "license": "Creative Commons Attribution 4.0 International", 
    "mcid": "2024.166", 
    "_oai": {
      "id": "oai:materialscloud.org:2399"
    }, 
    "status": "published", 
    "title": "A prediction rigidity formalism for low-cost uncertainties in trained neural networks", 
    "keywords": [
      "machine learning", 
      "MARVEL/P2", 
      "uncertainty quantification", 
      "neural networks", 
      "Laplace approximation", 
      "prediction rigidity", 
      "confidence interval", 
      "regressors", 
      "neural tangent kernel"
    ]
  }, 
  "revision": 7, 
  "created": "2024-10-13T12:45:32.796804+00:00"
}