{"version":3,"sources":["assets/AMAZON.png","assets/googlelogo_color_272x92dp.png","assets/MIT-IBM-logo.png","assets/MBUZAI.png","assets/Qualcomm.jpg","assets/Schmidt1.png","assets/microsoft.png","assets/causalens.png","assets/Picture1.png","assets/Picture2.png","assets/Picture3.png","assets/science_hub_banner3.png","assets/apple_logo.jpg","assets/servicenow_logo.png","assets/logo_v3.png","components/Header/Header.jsx","common/constants/navItems.js","components/NavBar/components/NavItem/NavItem.jsx","components/NavBar/components/DropdownItem/DropdownItem.jsx","components/NavBar/NavBar.jsx","pages/2022/ImportantDatesPage/components/DateTable/DatesTable.jsx","components/Title/Title.jsx","common/constants/importantDates.js","pages/2022/ImportantDatesPage/index.jsx","pages/2022/AcceptedPapersPage/components/PaperTable/PaperTable.jsx","common/constants/acceptedPapers_oral.js","common/constants/acceptedPapers_poster.js","pages/2022/AcceptedPapersPage/index.jsx","pages/2022/OrganizingCommitteePage/components/OCTable/OCTable.js","common/constants/organizingCommittee.js","pages/2022/OrganizingCommitteePage/index.jsx","pages/2022/ReviewerInstructionPage/index.jsx","common/constants/advisoryboard.js","pages/2022/AdvisoryBoardPage/index.jsx","pages/2022/SponsorsPage/index.jsx","pages/2022/AreaChairPage/index.jsx","pages/2022/ProgramCommitteePage/index.jsx","pages/2022/CLeaRPage/index.jsx","pages/2022/FullAgendaPage/index.jsx","pages/2022/KeynoteSpeakerPage/index.jsx","pages/2022/CallforPapersPage/index.jsx","pages/2022/ACInstructionPage/index.jsx","pages/2022/CodeConductPage/index.jsx","pages/2022/CoIPage/index.jsx","pages/2022/CameraReady/index.jsx","pages/2022/RegistrationPage/index.jsx","pages/2023/ImportantDatesPage/components/DateTable/DatesTable.jsx","pages/2023/ImportantDatesPage/index.jsx","pages/2023/AcceptedDatasetsPage/components/PaperTable/PaperTable.jsx","common/constants/acceptedDatasets_2023.js","pages/2023/AcceptedDatasetsPage/index.jsx","pages/2023/AcceptedPapersPage/components/PaperTable/PaperTable.jsx","common/constants/acceptedPapers_oral2023.js","common/constants/acceptedPapers_poster2023.js","common/constants/acceptedPapers_best2023.js","pages/2023/AcceptedPapersPage/index.jsx","pages/2023/OrganizingCommitteePage/components/OCTable/OCTable.js","pages/2023/OrganizingCommitteePage/index.jsx","pages/2023/ReviewerInstructionPage/index.jsx","pages/2023/AdvisoryBoardPage/index.jsx","pages/2023/SponsorsPage/index.jsx","pages/2023/AreaChairPage/index.jsx","pages/2023/ProgramCommitteePage/index.jsx","pages/2023/CLeaRPage/index.jsx","pages/2023/FullAgendaPage/index.jsx","pages/2023/KeynoteSpeakerPage/index.jsx","pages/2023/CallforPapersPage/index.jsx","pages/2023/CallforDatasetsPage/index.jsx","pages/2023/ACInstructionPage/index.jsx","pages/2023/CodeConductPage/index.jsx","pages/2023/CoIPage/index.jsx","pages/2023/CameraReady/index.jsx","pages/2023/RegistrationPage/index.jsx","pages/2023/PresentationInstructionPage/index.jsx","pages/2023/OnlineSchedulePage/index.jsx","pages/2023/SocialActivitiesPage/index.jsx","pages/2023/StudentVolunteersPage/index.jsx","pages/2024/ImportantDatesPage/components/DateTable/DatesTable.jsx","pages/2024/ImportantDatesPage/index.jsx","pages/2024/AcceptedDatasetsPage/components/PaperTable/PaperTable.jsx","pages/2024/AcceptedDatasetsPage/index.jsx","pages/2024/AcceptedPapersPage/components/PaperTable/PaperTable.jsx","pages/2024/AcceptedPapersPage/index.jsx","pages/2024/OrganizingCommitteePage/components/OCTable/OCTable.js","pages/2024/OrganizingCommitteePage/index
.jsx","pages/2024/ReviewerInstructionPage/index.jsx","pages/2024/AdvisoryBoardPage/index.jsx","pages/2024/SponsorsPage/index.jsx","pages/2024/AreaChairPage/index.jsx","pages/2024/ProgramCommitteePage/index.jsx","pages/2024/CLeaRPage/index.jsx","pages/2024/FullAgendaPage/index.jsx","pages/2024/KeynoteSpeakerPage/index.jsx","pages/2024/CallforPapersPage/index.jsx","pages/2024/ACInstructionPage/index.jsx","pages/2024/CodeConductPage/index.jsx","pages/2024/CoIPage/index.jsx","pages/2024/CameraReady/index.jsx","pages/2024/RegistrationPage/index.jsx","pages/2024/PresentationInstructionPage/index.jsx","pages/2024/OnlineSchedulePage/index.jsx","pages/2024/SocialActivitiesPage/index.jsx","pages/2024/StudentVolunteersPage/index.jsx","pages/2025/ImportantDatesPage/components/DateTable/DatesTable.jsx","pages/2025/ImportantDatesPage/index.jsx","pages/2025/AcceptedDatasetsPage/components/PaperTable/PaperTable.jsx","common/constants/acceptedDatasets_2025.js","pages/2025/AcceptedDatasetsPage/index.jsx","pages/2025/AcceptedPapersPage/components/PaperTable/PaperTable.jsx","common/constants/acceptedPapers_oral2025.js","common/constants/acceptedPapers_poster2025.js","common/constants/acceptedPapers_best2025.js","pages/2025/AcceptedPapersPage/index.jsx","pages/2025/OrganizingCommitteePage/components/OCTable/OCTable.js","pages/2025/OrganizingCommitteePage/index.jsx","pages/2025/ReviewerInstructionPage/index.jsx","pages/2025/AdvisoryBoardPage/index.jsx","pages/2025/SponsorsPage/index.jsx","pages/2025/AreaChairPage/index.jsx","pages/2025/ProgramCommitteePage/index.jsx","pages/2025/CLeaRPage/index.jsx","pages/2025/FullAgendaPage/index.jsx","pages/2025/KeynoteSpeakerPage/index.jsx","pages/2025/CallforPapersPage/index.jsx","pages/2025/ACInstructionPage/index.jsx","pages/2025/CodeConductPage/index.jsx","pages/2025/CoIPage/index.jsx","pages/2025/CameraReady/index.jsx","pages/2025/RegistrationPage/index.jsx","pages/2025/PresentationInstructionPage/index.jsx","pages/2025/OnlineSchedulePage/index.jsx","pages/2025/SocialActivitiesPage/index.jsx","pages/2025/StudentVolunteersPage/index.jsx","components/Content/Content.jsx","app/App.jsx","index.js"],"names":["Header","year","useLocation","pathname","split","className","src","Logo","alt","navItems","callItems","dropdown","items","programItems","yearItems","dropItems","NavItem","children","to","join","YearItem","id","data-toggle","data-display","aria-haspopup","aria-expanded","aria-labelledby","map","item","DropdownItem","NavBar","includes","DatesTable","array","width","name","date","Title","importantDates2022","importantDates2023","importantDates2024","importantDates2025","ImportantDates2022page","PaperTable","title","author","description","lines","acceptedPapers","AcceptedPapers2022page","acceptedPapers_oral","acceptedPapers_poster","OCTable","position","persons","href","address","array2022","array2023","array2024","array2025","OrganizingCommittee2022page","ReviewerInstruction2022page","array1","AdvisoryBoard2022page","Sponsors2022page","require","default","AreaChair2022page","ProgramCommittee2022page","CLeaR2022page","class","HREF","FullAgenda2022page","color","KeynoteSpeaker2022page","CallforPapers2022page","download","ACInstruction2022page","CodeConduct2022page","CameraReady2022page","Registration2022page","border","ImportantDates2023page","filePath","process","target","rel","acceptedDatasets","AcceptedDatasets2023page","AcceptedPapers2023page","acceptedPapers_best","OrganizingCommittee2023page","ReviewerInstruction2023page","AdvisoryBoard2023page","Sponsors2023page","Are
aChair2023page","ProgramCommittee2023page","CLeaR2023page","FullAgenda2023page","KeynoteSpeaker2023page","CallforPapers2023page","CallforDatasets2023page","ACInstruction2023page","CodeConduct2023page","CameraReady2023page","Registration2023page","PresentationInstruction2023page","ZoomLink","topic","time","link","meetingId","passcode","localNumberLink","style","marginTop","marginBottom","papers","downloadLink","ZoomLinks","props","state","isAuthenticated","password","handleSubmit","bind","handleChange","event","preventDefault","this","setState","alert","value","paper","onSubmit","type","onChange","React","Component","OnlineSchedulepage","SocialActivities2023page","StudentVolunteers2023page","ImportantDates2024page","OrganizingCommittee2024page","ReviewerInstruction2024page","Sponsors2024page","AreaChair2024page","ProgramCommittee2024page","CLeaR2024page","FullAgenda2024page","CallforPapers2024page","ACInstruction2024page","CodeConduct2024page","Registration2024page","ImportantDates2025page","AcceptedDatasets2025page","AcceptedPapers2025page","OrganizingCommittee2025page","ReviewerInstruction2025page","AdvisoryBoard2025page","Sponsors2025page","AreaChair2025page","ProgramCommittee2025page","CLeaR2025page","FullAgenda2025page","CallforPapers2025page","ACInstruction2025page","CodeConduct2025page","CameraReady2025page","Registration2025page","PresentationInstruction2025page","SocialActivities2025page","StudentVolunteers2025page","Content","exact","path","component","CodeofConduct2022page","CoI2022page","Registration2022","CLeaR2022Page","CodeofConduct2023page","CoI2023page","Registration2023","OnlineSchedule2023page","CLeaR2023Page","AcceptedDatasets2024page","AcceptedPapers2024page","AdvisoryBoard2024page","KeynoteSpeaker2024page","CodeofConduct2024page","CoI2024page","CameraReady2024page","Registration2024","PresentationInstruction2024page","OnlineSchedule2024page","SocialActivities2024page","StudentVolunteers2024page","CLeaR2024Page","KeynoteSpeaker2025page","CodeofConduct2025page","CoI2025page","Registration2025","OnlineSchedule2025page","CLeaR2025Page","App","ReactDOM","render","StrictMode","document","getElementById"],"mappings":"iKAAA,OAAe,cAA0B,oC,6BCAzC,OAAe,cAA0B,uD,+NCAzC,OAAe,cAA0B,0C,6BCAzC,OAAe,cAA0B,oC,6BCAzC,OAAe,cAA0B,sC,6BCAzC,OAAe,cAA0B,sC,2JCAzC,OAAe,wvH,6BCAf,OAAe,gsK,iECAf,OAAe,cAA0B,sC,6BCAzC,OAAe,cAA0B,sC,6BCAzC,OAAe,cAA0B,sC,uHCAzC,OAAe,cAA0B,iD,6BCAzC,OAAe,cAA0B,wC,6BCAzC,OAAe,cAA0B,6C,uTCA1B,MAA0B,oC,eCiB1BA,EAZA,WACb,IAAMC,EAAOC,cAAcC,SAASC,MAAM,KAAK,GAC/C,OACE,wBAAQC,UAAU,SAAlB,SACE,sBAAKA,UAAU,YAAf,UACE,qBAAKC,IAAKC,EAAMC,IAAI,SAClB,yCAAU,SAAWP,W,QCXlBQ,EAAW,CACtB,yBACA,kBACA,wBACA,kBACA,kBACA,wBACA,YAGWC,EAAY,CACvB,KAAQ,CACN,CACIC,SAAU,QACVC,MAAO,CACL,qBAIR,KAAQ,CACN,CACID,SAAU,QACVC,MAAO,CACL,kBACA,uBAIR,KAAQ,CACN,CACID,SAAU,QACVC,MAAO,CACL,qBAIR,KAAQ,CACN,CACID,SAAU,QACVC,MAAO,CACL,sBAQGC,EAAe,CAC1B,KAAQ,CACN,CACEF,SAAU,UACVC,MAAO,CACL,cACA,mBACA,qBAIN,KAAQ,CACN,CACED,SAAU,UACVC,MAAO,CACL,cACA,mBACA,kBACA,oBACA,4BACA,kBACA,uBAIN,KAAQ,CACN,CACED,SAAU,UACVC,MAAO,CACL,iBAIP,KAAQ,CACL,CACED,SAAU,UACVC,MAAO,CACL,kBAiBKE,EAAY,CACvB,CACEH,SAAU,OACVC,MAAO,CACL,OACA,OACA,OACA,UAKOG,EAAY,CACvB,KAAQ,CACN,CACEJ,SAAU,aACVC,MAAO,CACL,uBACA,cACA,oBACA,oBAIN,KAAQ,CACN,CACED,SAAU,aACVC,MAAO,CACL,uBACA,cACA,iBACA,wBAIN,KAAQ,CACN,CACED,SAAU,aACVC,MAAO,CACL,uBACA,cACA,iBACA,wBAIN,KAAQ,CACN,CACED,SAAU,aACVC,MAAO,CACL,uBACA,cACA,iBACA,yBC9IOI,G,MATC,SAAC,GAAgB,IAAfC,EAAc,EAAdA,SACVhB,EAAOC,cAAcC,SAASC,MAAM,KAAK,GAC/C,OACE,oBAAIC,UAAU,WAAd,SACE,cAAC,IAAD,CAASA,UAAU,WAAWa,GAAE,WAAMjB,EA
AO,IAAKgB,EAASb,MAAM,KAAKe,KAAK,KAA3E,SAAmFF,QCH5EG,G,MAAW,SAAC,GAAuB,IAAtBT,EAAqB,EAArBA,SAAUC,EAAW,EAAXA,MAClC,OACE,sBAAKP,UAAU,qBAAf,UACE,mBACEA,UAAU,2BACVgB,GAAIV,EACJW,cAAY,WACZC,eAAa,WACbC,gBAAc,OACdC,gBAAc,QANhB,SAQGd,IAEH,qBAAKN,UAAU,gBAAgBqB,kBAAiBf,EAAhD,SACGC,EAAMe,KAAI,SAACC,GAAD,OACT,cAAC,IAAD,CAAMvB,UAAU,gBAAgBa,GAAE,WAAMU,EAAKxB,MAAM,KAAKe,KAAK,KAA7D,SACGS,cAQAC,EAAe,SAAC,GAAuB,IAAtBlB,EAAqB,EAArBA,SAAUC,EAAW,EAAXA,MAChCX,EAAOC,cAAcC,SAASC,MAAM,KAAK,GAC/C,OACE,sBAAKC,UAAU,qBAAf,UACE,mBACEA,UAAU,2BACVgB,GAAIV,EACJW,cAAY,WACZC,eAAa,WACbC,gBAAc,OACdC,gBAAc,QANhB,SAQGd,IAEH,qBAAKN,UAAU,gBAAgBqB,kBAAiBf,EAAhD,SACGC,EAAMe,KAAI,SAACC,GAAD,OACT,cAAC,IAAD,CAAMvB,UAAU,gBAAgBa,GAAE,WAAMjB,EAAO,IAAK2B,EAAKxB,MAAM,KAAKe,KAAK,KAAzE,SACGS,aCTEE,G,MA9BA,WACb,IAAM7B,EAAOC,cAAcC,SAASC,MAAM,KAAK,GAI/C,OAHAU,EAAU,GAAGH,SAAW,SAAWV,EAAO,IAGtCA,GAAQa,EAAU,GAAGF,MAAMmB,SAAS9B,GAEpC,qBAAKI,UAAU,iBAAf,SACE,sBAAKA,UAAU,kBAAf,UACES,EAAUa,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,eAAcA,OACnCb,EAAUd,GAAM0B,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,eAAkBA,OAC9ClB,EAAUT,GAAM0B,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,eAAkBA,OAC9Cf,EAAaZ,GAAM0B,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,eAAkBA,OACjDnB,EAASkB,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,UAAUA,YAMpC,qBAAKvB,UAAU,iBAAf,SACE,sBAAKA,UAAU,kBAAf,UACGS,EAAUa,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,eAAcA,OACpCnB,EAASkB,KAAI,SAAAC,GAAI,OAAI,cAAC,EAAD,UAAUA,cCL3BI,G,MArBI,SAAC,GAAa,IAAZC,EAAW,EAAXA,MACnB,OACE,wBAAO5B,UAAU,mBAAjB,UACE,gCACE,+BACE,oBAAI6B,MAAM,MAAV,kBACA,2CAGJ,gCACGD,EAAMN,KAAI,gBAAGQ,EAAH,EAAGA,KAAMC,EAAT,EAASA,KAAT,OACT,+BACE,6BAAKD,IACL,6BAAKC,iBCNFC,G,MAPD,SAAC,GAAgB,IAAfpB,EAAc,EAAdA,SACd,OACE,oBAAIZ,UAAU,QAAd,SACGY,MCNMqB,EAAqB,CAChC,CAAEH,KAAM,4BAA6BC,KAAM,kDAC3C,CAAED,KAAM,mBAAoBC,KAAM,kBAClC,CAAED,KAAM,uBAAwBC,KAAM,8BACtC,CAAED,KAAM,kBAAmBC,KAAM,mBACjC,CAAED,KAAM,wBAAyBC,KAAM,gCACvC,CAAED,KAAM,kBAAmBC,KAAM,0BAGtBG,EAAqB,CAChC,CAAEJ,KAAM,4BAA6BC,KAAM,kDAC3C,CAAED,KAAM,mBAAoBC,KAAM,iBAClC,CAAED,KAAM,uBAAwBC,KAAM,8BACtC,CAAED,KAAM,kBAAmBC,KAAM,mBACjC,CAAED,KAAM,wBAAyBC,KAAM,gCACvC,CAAED,KAAM,kBAAmBC,KAAM,wBACjC,CAAED,KAAM,8BAA+BC,KAAM,iCAGlCI,EAAqB,CAChC,CAAEL,KAAM,4BAA6BC,KAAM,kDAC3C,CAAED,KAAM,mBAAoBC,KAAM,iBAClC,CAAED,KAAM,uBAAwBC,KAAM,8BACtC,CAAED,KAAM,kBAAmBC,KAAM,mBACjC,CAAED,KAAM,wBAAyBC,KAAM,gCACvC,CAAED,KAAM,kBAAmBC,KAAM,sBAND,GASrBK,EAAqB,CAChC,CAAEN,KAAM,4BAA6BC,KAAM,iDAC3C,CAAED,KAAM,mBAAoBC,KAAM,kBAClC,CAAED,KAAM,uBAAwBC,KAAM,+BACtC,CAAED,KAAM,kBAAmBC,KAAM,mBACjC,CAAED,KAAM,wBAAyBC,KAAM,gCACvC,CAAED,KAAM,kBAAmBC,KAAM,gCAND,GCdnBM,EATgB,WAC7B,OACE,qCACE,cAAC,EAAD,8BACA,cAAC,EAAD,CAAYT,MAAOK,Q,gBCWVK,G,MAhBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQC,EAAjB,EAAiBA,YAAjB,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EACD,cAAC,IAAD,CAAcE,MAAO,EAArB,SAAyBD,gBCoCtBE,EAhDQ,CACrB,CACEJ,MAAO,8BACPC,OACE,4CAEJ,CACED,MAAO,oEACPC,OACE,uNAEJ,CACED,MAAO,iCACPC,OACE,oEAEJ,CACED,MAAO,sCACPC,OACE,sIAEJ,CACED,MAAO,0FACPC,OACE,kYAEJ,CACED,MAAO,2DACPC,OACE,sFAEJ,CACED,MAAO,6CACPC,OACE,mHAEJ,CACED,MAAO,8DACPC,OACE,kHAEJ,CACED,MAAO,gEACPC,OACE,iMC+JSG,EA3MQ,CACrB,CACEJ,MAAO,kGACPC,OACE,gJAEJ,CACED,MAAO,gEACPC,OACE,gIAEJ,CACED,MAAO,iHACPC,OACE,wFAEJ,CACED,MAAO,oFACPC,OACE,6JAEJ,CACED,MAAO,sEACPC,OACE,kFAEJ,CACED,MAAO,wDACPC,OACE,kEAEJ,CACED,MAAO,iCACPC,OACE,8EAEJ,CACED,MAAO,+DACPC,OACE,kHAEJ,CACED,MAAO,yCACPC,OACE,6IAEJ,CACED,MAAO,mFACPC,OACE,8MAEJ,CACED,MAAO,gDACPC,OACE,6GAEJ,CACED,MAAO,4EACPC,OACE,2HAEJ,CACED,MAAO,iFACPC,OACE,6JAEJ,CACED,MAAO,iFACPC,OACE,yUAEJ,CACED,MAAO,mEACPC,OACE,+IAEJ,CACED,MAAO,6DACPC,OACE,qKAEJ,CACED,MAAO,
wDACPC,OACE,4FAEJ,CACED,MAAO,2FACPC,OACE,sSAEJ,CACED,MAAO,2EACPC,OACE,kFAEJ,CACED,MAAO,kDACPC,OACE,6EAEJ,CACED,MAAO,sDACPC,OACE,oHAEJ,CACED,MAAO,0DACPC,OACE,qLAEJ,CACED,MAAO,oGACPC,OACE,iMAEJ,CACED,MAAO,iFACPC,OACE,gHAEJ,CACED,MAAO,qGACPC,OACE,4IAEJ,CACED,MAAO,4CACPC,OACE,+DAEJ,CACED,MAAO,8GACPC,OACE,yFAEJ,CACED,MAAO,uDACPC,OACE,gRAEJ,CACED,MAAO,+DACPC,OACE,6GAEJ,CACED,MAAO,sCACPC,OACE,+CAEJ,CACED,MAAO,+EACPC,OACE,iLAEJ,CACED,MAAO,sIACPC,OACE,2EAEJ,CACED,MAAO,kEACPC,OACE,6GAEJ,CACED,MAAO,qEACPC,OACE,gIAEJ,CACED,MAAO,6CACPC,OACE,kLAEJ,CACED,MAAO,kEACPC,OACE,+IAEJ,CACED,MAAO,wHACPC,OACE,oFAEJ,CACED,MAAO,yEACPC,OACE,sMAEJ,CACED,MAAO,4DACPC,OACE,+LAEJ,CACED,MAAO,qDACPC,OACE,+JCpLSI,EAbgB,WAC7B,OACE,gCACE,cAAC,EAAD,8BACA,6BAAI,uCACJ,cAAC,EAAD,CAAYhB,MAAOiB,IACnB,6BAAI,yCACJ,cAAC,EAAD,CAAYjB,MAAOkB,QCaVC,G,MAvBC,SAAC,GAAe,IAAbnB,EAAY,EAAZA,MACjB,OACE,uBAAO5B,UAAU,gBAAjB,SACG4B,EAAMN,KAAI,gBAAG0B,EAAH,EAAGA,SAAUC,EAAb,EAAaA,QAAb,OACT,6BACE,+BACE,oBAAIjD,UAAU,oBAAd,SAAmCgD,IAClCC,EAAQ3B,KAAI,gBAAGQ,EAAH,EAAGA,KAAMoB,EAAT,EAASA,KAAMC,EAAf,EAAeA,QAAf,OACX,sBAAKnD,UAAU,kBAAf,UACE,mBAAGA,UAAU,wBAAwBkD,KAAMA,EAA3C,SACGpB,IAEH,uBACA,sBAAM9B,UAAU,mBAAhB,SAAoCmD,sBChBvCC,EAAY,CACvB,CACEJ,SAAU,6BACVC,QAAS,CACP,CACEnB,KAAM,wBACNoB,KAAM,4BACNC,QAAS,yDAEV,CACCrB,KAAM,iBACNoB,KAAM,iCACNC,QAAS,8CAEX,CACErB,KAAM,YACNoB,KAAM,wCACNC,QAAS,qCAIf,CACEH,SAAU,yCACVC,QAAS,CACP,CACEnB,KAAM,4BACNoB,KAAM,+BACNC,QAAS,gBAEX,CACErB,KAAM,iCACNoB,KAAM,uCACNC,QAAS,2CAEV,CACCrB,KAAM,kBACNoB,KAAM,uCACNC,QAAS,2CAEX,CACErB,KAAM,yBACNoB,KAAM,0DACNC,QAAS,2DAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,cACNoB,KAAM,mCACNC,QAAS,yDAEX,CACErB,KAAM,oBACNoB,KAAM,8BACNC,QAAS,uBAIf,CACEH,SAAU,uBACVC,QAAS,CACP,CACEnB,KAAM,aACNoB,KAAM,gCACNC,QAAS,6CAEX,CACErB,KAAM,kBACNoB,KAAM,4BACNC,QAAS,kBAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,kBACNoB,KAAM,iCACNC,QAAS,wCAEX,CACErB,KAAM,sBACNoB,KAAM,+DACNC,QAAS,kDAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,eACNoB,KAAM,+DACNC,QAAS,6CAEX,CACErB,KAAM,sBACNoB,KAAM,2CACNC,QAAS,0CAIf,CACEH,SAAU,kBACVC,QAAS,CACP,CACEnB,KAAM,cACNoB,KAAM,0BACNC,QAAS,mCAEX,CACErB,KAAM,kBACNoB,KAAM,4BACNC,QAAS,8BAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,wBACNoB,KAAM,iCACNC,QAAS,2BAEX,CACErB,KAAM,sBACNoB,KAAM,yBACNC,QAAS,uCAIf,CACEH,SAAU,kBACVC,QAAS,CACP,CACEnB,KAAM,gBACNoB,KAAM,mCACNC,QAAS,6CAMJE,EAAY,CACvB,CACEL,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,sBACNoB,KAAM,sCACNC,QAAS,gDAEV,CACCrB,KAAM,gBACNoB,KAAM,sEACNC,QAAS,SAIf,CACEH,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,yBACNoB,KAAM,oCACNC,QAAS,2BAEV,CACCrB,KAAM,cACNoB,KAAM,2BACNC,QAAS,sBAEX,CACErB,KAAM,kBACNoB,KAAM,6BACNC,QAAS,yBAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,oBACNoB,KAAM,8BACNC,QAAS,UAEX,CACErB,KAAM,iBACNoB,KAAM,4BACNC,QAAS,4BAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,sBACNoB,KAAM,8BACNC,QAAS,uBAEX,CACErB,KAAM,sBACNoB,KAAM,0DACNC,QAAS,kDAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,kBACNoB,KAAM,uCACNC,QAAS,gDAEX,CACErB,KAAM,cACNoB,KAAM,4BACNC,QAAS,wBAIf,CACEH,SAAU,uBACVC,QAAS,CACP,CACEnB,KAAM,kBACNoB,KAAM,iCACNC,QAAS,2BAEX,CACErB,KAAM,cACNoB,KAAM,0BACNC,QAAS,kBAIf,CACEH,SAAU,kBACVC,QAAS,CACP,CACEnB,KAAM,gBACNoB,KAAM,iCACNC,QAAS,qCAEX,CACErB,KAAM,kBACNoB,KAAM,4BACNC,QAAS,yBAIf,CACEH,SAAU,yCACVC,QAAS,CACP,CACEnB,KAAM,cACNoB,KAAM,wBACNC,QAAS,2BAEX,CACErB,KAAM,kBACNoB,KAAM,8DACNC,QAAS,uBAEX,CACErB,KAAM,oBACNoB,KAAM,4CACNC,QAAS,6BAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,eACNoB,KAAM,4BACNC,QAAS,SAEX,CACErB,KAAM,iBACNoB,KAAM,mCACNC,QAAS,oCAIf,CACEH,SAAU,mBACVC,QAAS,CACP,CACEnB,KAAM,cACNoB,KAAM,gCACNC,QAAS,OAEX,CACErB,KAAM,eACNoB,KAAM,8BACNC,QAAS,oBAIf,CACEH,SAAU,iBACVC,QAAS,CACP,CACEnB,K
AAM,gBACNoB,KAAM,yCACNC,QAAS,6BAEX,CACErB,KAAM,UACNoB,KAAM,uBACNC,QAAS,8BAMJG,EAAY,CACvB,CACEN,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,gBACNoB,KAAM,mCACNC,QAAS,QAEV,CACCrB,KAAM,cACNoB,KAAM,2BACNC,QAAS,wBAIf,CACEH,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,kBACNoB,KAAM,0CACNC,QAAS,wBAEX,CACErB,KAAM,sBACNoB,KAAM,sCACNC,QAAS,kDAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,iBACNoB,KAAM,+BACNC,QAAS,gBAEX,CACErB,KAAM,WACNoB,KAAM,gCACNC,QAAS,+BAEX,CACErB,KAAM,iBACNoB,KAAM,2BACNC,QAAS,kCAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,iBACNoB,KAAM,iCACNC,QAAS,OAEX,CACErB,KAAM,gBACNoB,KAAM,iCACNC,QAAS,qCAEX,CACErB,KAAM,YACNqB,QAAS,oCAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,cACNoB,KAAM,uCACNC,QAAS,2BAEX,CACErB,KAAM,iBACNoB,KAAM,4BACNC,QAAS,wBAIf,CACEH,SAAU,uBACVC,QAAS,CACP,CACEnB,KAAM,gBACNoB,KAAM,kCACNC,QAAS,2BAEX,CACErB,KAAM,cACNoB,KAAM,4BACNC,QAAS,+BAIf,CACEH,SAAU,kBACVC,QAAS,CACP,CACEnB,KAAM,oBACNoB,KAAM,8BACNC,QAAS,YAEX,CACErB,KAAM,oBACNoB,KAAM,0BACNC,QAAS,uBAEX,CACErB,KAAM,WACNoB,KAAM,wCACNC,QAAS,uCAIf,CACEH,SAAU,yCACVC,QAAS,CACP,CACEnB,KAAM,mBACNoB,KAAM,4BACNC,QAAS,mDAEX,CACErB,KAAM,YACNoB,KAAM,iCACNC,QAAS,UAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,eACNoB,KAAM,iCACNC,QAAS,4BAEX,CACErB,KAAM,cACNoB,KAAM,yBACNC,QAAS,UAIb,CACAH,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,eACNoB,KAAM,6BACNC,QAAS,qCAEX,CACErB,KAAM,SACNoB,KAAM,2BACNC,QAAS,8BAKf,CACEH,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,gBACNoB,KAAM,yCACNC,QAAS,6BAEX,CACErB,KAAM,gBACNoB,KAAM,kCACNC,QAAS,mEAIf,CACEH,SAAU,oBACVC,QAAS,CACP,CACEnB,KAAM,iBAER,CACEA,KAAM,iBAER,CACEA,KAAM,aAER,CACEA,KAAM,eAER,CACEA,KAAM,kBAER,CACEA,KAAM,gBAER,CACEA,KAAM,iBAQDyB,EAAY,CACrB,CACAP,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,iBACNoB,KAAM,gDACNC,QAAS,QAEV,CACCrB,KAAM,WACNoB,KAAM,+CACNC,QAAS,YAIf,CACEH,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,gBACNoB,KAAM,kDACNC,QAAS,kCAEX,CACErB,KAAM,cACNoB,KAAM,0BACNC,QAAS,wCAIf,CACEH,SAAU,uBACVC,QAAS,CACP,CACEnB,KAAM,kBACNoB,KAAM,oCACNC,QAAS,2BAEX,CACErB,KAAM,eACNoB,KAAM,mCACNC,QAAS,2BAIf,CACEH,SAAU,kBACVC,QAAS,CACP,CACEnB,KAAM,oBACNoB,KAAM,8BACNC,QAAS,YAEX,CACErB,KAAM,aACNoB,KAAM,2CACNC,QAAS,aAEX,CACErB,KAAM,cACNoB,KAAM,sBACNC,QAAS,gCAIf,CACEH,SAAU,iBACVC,QAAS,CACP,CACEnB,KAAM,eACNoB,KAAM,6BACNC,QAAS,qCAEX,CACErB,KAAM,YACNoB,KAAM,oBACNC,QAAS,gCAIf,CACEH,SAAU,gBACVC,QAAS,CACP,CACEnB,KAAM,mBACNoB,KAAM,kDACNC,QAAS,QAEX,CACErB,KAAM,cACNoB,KAAM,gCACNC,QAAS,eAIf,CACEH,SAAU,qBACVC,QAAS,CACP,CACEnB,KAAM,iBACNoB,KAAM,6BACNC,QAAS,2BAEX,CACErB,KAAM,kBACNoB,KAAM,iCACNC,QAAS,sBAEX,CACErB,KAAM,mBACNoB,KAAM,kCACNC,QAAS,iCC1nBFK,EATqB,WAClC,OACE,gCACE,cAAC,EAAD,mCACA,cAAC,EAAD,CAAS5B,MAAOwB,QCmEPK,EA1EqB,WAClC,OACE,gCACE,cAAC,EAAD,oCACA,qPACA,4CACA,+BACA,6BAAI,gEACF,+BACE,yMACA,6JACA,sEAAwC,mBAAGP,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,UAED,6BAAI,uGACL,+BACE,sKACA,oPAEF,6BAAI,kGACJ,+BACE,qPACA,4OAEF,6BAAI,0GACJ,+BACE,sRACA,qPACA,ihBACA,sLAGJ,6CACA,+BACE,+BAAI,wCAAJ,6JACA,+BAAI,4CAAJ,8FACA,+BACE,iRACA,kaACA,oTACA,wVAEF,+BAAI,8CAAJ,iJACA,+BACE,8EACA,8EACA,6EACA,uDACA,0EACA,0EACA,uEACA,oDACA,qDACA,wDAEJ,+BAAI,iDAAJ,2GACA,+BACI,gLACA,gQACA,qQACA,8QACA,iNAGJ,2CACA,8BAAG,iDAAH,8eACA,8BAAG,wDAAH,q0BACA,8BAAG,kDAAH,ouBACA,8BAAG,uEAAH,gkBAEA,oCAAM,mFAAqD,mBAAGA,KAAK,gBAAR,6BAArD,OAAN,WCxEOtB,EAAQ,CACnB,CACEqB,QAAS,CACP,CACEnB,KAAM,oBACNoB,KAAM,oDACNC,QAAS,mCAEX,CACErB,KAAM,mBACNoB,KAAM,iDACNC,QAAS,2BAEX,CACErB,KAAM,wBACNoB,KAAM,4BACNC,QAAS,yDAEX,CACErB,KAAM,SACNoB,KAAM,mCACNC,QAAS,oBAEX,CACErB,KAAM,kBACNoB,KAAM,8BACNC,QAAS,eAEX,CACErB,KAAM,eACNoB,KAAM,6CACNC,QAAS,2BAEX,CACErB,KAAM,cACNoB,KAAM,wCACNC,QAAS,aAEX,CACErB,KAAM,cACNoB,KAAM,2BACNC,QAAS,gCAMJO,EAAS,CACpB,CACET,QAAS,CACP,CACEnB,
KAAM,mBACNoB,KAAM,kCACNC,QAAS,2BAEV,CACCrB,KAAM,cACNoB,KAAM,uCACNC,QAAS,yCAEV,CACCrB,KAAM,gBACNoB,KAAM,sEACNC,QAAS,mCAEV,CACCrB,KAAM,eACNoB,KAAM,4CACNC,QAAS,2BAEV,CACCrB,KAAM,kBACNoB,KAAM,qDACNC,QAAS,wBAEV,CACCrB,KAAM,oBACNoB,KAAM,iEACNC,QAAS,iCAEV,CACCrB,KAAM,sBACNoB,KAAM,8BACNC,QAAS,8CAEV,CACCrB,KAAM,gBACNoB,KAAM,4BACNC,QAAS,qCCrEFQ,EAde,WAC5B,OACE,gCACE,cAAC,EAAD,6BACA,gCACA,+BACA,6BAAI,cAAC,EAAD,CAAS/B,MAAOA,MACpB,6BAAI,cAAC,EAAD,CAASA,MAAO8B,eCuBXE,G,MA/BU,WACvB,OACA,gCACI,cAAC,EAAD,uBACF,mCACA,oBAAI5D,UAAU,+BAAd,2BACA,uBAAOA,UAAU,qBAAjB,SACE,+BACE,6BAAI,qBAAKC,IAAK4D,EAAQ,IAA8BC,QAASjC,MAAM,UACnE,6BAAI,qBAAK5B,IAAK4D,EAAQ,IAAoCC,QAASjC,MAAM,UACzE,6BAAI,qBAAK5B,IAAK4D,EAAQ,IAA8BC,QAASjC,MAAM,eAGvE,oBAAI7B,UAAU,iCAAd,6BACA,uBAAOA,UAAU,qBAAjB,SACE,+BACE,6BAAI,qBAAKC,IAAK4D,EAAQ,IAAgCC,QAASjC,MAAM,UACrE,6BAAI,qBAAK5B,IAAK4D,EAAQ,IAAgCC,QAASjC,MAAM,eAGzE,oBAAI7B,UAAU,iCAAd,6BACA,uBAAOA,UAAU,qBAAjB,SACE,6BACA,6BAAI,qBAAKC,IAAK4D,EAAQ,IAAiDC,QAASjC,MAAM,sBCmC7EkC,EA3DW,WACxB,OACE,gCACE,cAAC,EAAD,0BADF,mCA4BgC,uBA5BhC,gDA6B6C,uBA7B7C,6CA8B0C,uBA9B1C,mCA+BgC,uBA/BhC,gCAgC6B,uBAhC7B,iDAiC2C,uBAjC3C,yCAkCsC,uBAlCtC,mDAmC2C,uBAnC3C,oCAoCiC,uBApCjC,mCAqCgC,uBArChC,2CAsCwC,uBAtCxC,2BAuCwB,uBAvCxB,6CAwC0C,uBAxC1C,4CAyCyC,uBAzCzC,0CA0CuC,uBA1CvC,wCA2CqC,uBA3CrC,0FA4CuF,uBA5CvF,uBA6CoB,uBA7CpB,0CA8CuC,uBA9CvC,qCA+CkC,uBA/ClC,wBAgDqB,uBAhDrB,8CAiD2C,uBAjD3C,oCAkDiC,uBAlDjC,6CAmD0C,uBAC1C,2BCsDWC,EA5GkB,WAC/B,OACE,gCACE,cAAC,EAAD,gCADF,gCAG6B,uBAH7B,mDAIgD,uBAJhD,+BAK4B,uBAL5B,mCAMgC,uBANhC,+CAO4C,uBAP5C,2CAQwC,uBARxC,kDAS+C,uBAT/C,iDAU8C,uBAV9C,uDAWoD,uBAXpD,gDAY6C,uBAZ7C,gBAaa,uBAbb,uCAcoC,uBAdpC,6CAe0C,uBAf1C,sCAgBmC,uBAhBnC,4CAiByC,uBAjBzC,iDAkB8C,uBAlB9C,6CAmB0C,uBAnB1C,+BAoB4B,uBApB5B,+CAqB4C,uBArB5C,kDAsB+C,uBAtB/C,oDAuBiD,uBAvBjD,sDAwBmD,uBAxBnD,8DAyB2D,uBAzB3D,gDA0B6C,uBA1B7C,6CA2B0C,uBA3B1C,+BA4B4B,uBA5B5B,2DA6BwD,uBA7BxD,+DA8B4D,uBA9B5D,8CA+B2C,uBA/B3C,2CAgCwC,uBAhCxC,mDAiCgD,uBAjChD,oCAkCiC,uBAlCjC,0BAmCuB,uBAnCvB,iEAoC8D,uBApC9D,4CAqCyC,uBArCzC,qDAsCkD,uBAtClD,4DAuCmD,uBAvCnD,wCAwCqC,uBAxCrC,yCAyCgC,uBAzChC,oCA0CiC,uBA1CjC,4DA2CyD,uBA3CzD,2CA4CwC,uBA5CxC,6CA6C0C,uBA7C1C,wCA8CqC,uBA9CrC,6CA+C0C,uBA/C1C,kCAgD+B,uBAhD/B,kCAiD+B,uBAjD/B,0CAkDuC,uBAlDvC,yBAmDsB,uBAnDtB,wCAoDqC,uBApDrC,yDAqDsD,uBArDtD,kEAsD+D,uBAtD/D,yCAuDsC,uBAvDtC,sBAwDmB,uBAxDnB,sCAyDmC,uBAzDnC,sCA0DmC,uBA1DnC,4CA2DsC,uBA3DtC,qCA4DkC,uBA5DlC,6BA6D0B,uBA7D1B,2CA8DwC,uBA9DxC,8CA+D2C,uBA/D3C,yCAgEsC,uBAhEtC,yCAiEsC,uBAjEtC,gEAkE6D,uBAlE7D,iCAmE8B,uBAnE9B,uBAoEoB,uBApEpB,0CAqEuC,uBArEvC,+CAsE4C,uBAtE5C,gDAuE6C,uBAvE7C,uCAwEoC,uBAxEpC,yCAyEsC,uBAzEtC,iBA0Ec,uBA1Ed,8CA2E2C,uBA3E3C,4CA4EyC,uBA5EzC,wCA6EqC,uBA7ErC,8CA8E2C,uBA9E3C,4CA+EyC,uBA/EzC,2CAgFwC,uBAhFxC,gDAiF6C,uBAjF7C,sCAkFmC,uBAlFnC,qBAmFkB,uBAnFlB,0CAoFoC,uBApFpC,kDAqF+C,uBArF/C,iCAsF8B,uBAtF9B,uCAuFoC,uBAvFpC,qCAwFkC,uBAxFlC,sCAyFmC,uBAzFnC,sCA0FmC,uBA1FnC,0CA2FuC,uBA3FvC,wDA4FqD,uBA5FrD,oCA6FiC,uBA7FjC,4CA8FyC,uBA9FzC,sCA+FmC,uBA/FnC,iCAgG8B,uBAhG9B,uCAiGoC,uBAjGpC,mCAkGgC,uBAlGhC,sCAmGmC,uBAnGnC,qCAoGkC,uBAClC,2BC9BWC,G,MAtEO,WACpB,OACE,gCACE,cAAC,EAAD,yDACA,sBAAKC,MAAM,2BAAX,UACE,krBAYA,+uBAWqC,iIAxBvC,4DA2BE,+BACA,gFACA,0EACA,oFACA,6EACA,+EACA,uGAGA,kFACA,qFACA,wDACA,uHAEgC,OAEhC,4FAEE,2CAAa,mBAAGhB,KAAK,kEAAR,6EAAb,UAEJ,uBACA,gCACA,2CACA,+BACA,0GAA4E,mBAAGiB,KAAK,GAAR,kBAA5E,+BACA,2FACA,yFAA2D,mBAAGjB,KAAK,yCAAR,wCAA3D,QACA,8EAAgD,mBAAGA,KAAK,4CAAR,iCAAhD,cAGA,uBACA,sBAAKgB,MAAM,2BAAX,UACA,iDACA,cAAC,EAAD,CAAYtC,MAAOK,YCwEVmC,EA3IY,WACzB,OACE,gCACE,cAAC,EAAD,0BACA,+PACA,6BAAI,sBAAMC,MAAM,WAAZ,wCACJ,kCACA,+BAAI,gFACJ,6BACA,+BAAI,4CAAJ,wFAGA,+BAAI,yDACJ,6BACA,qKAGA,+BAA
I,yDACJ,6BACA,oIAGA,6BAAI,oDACJ,+BAAI,kIAAwG,uBAC5G,+BACA,6LACA,qHACA,2MAGA,+BAAI,2HAAiG,uBACrG,+BACE,qLACA,4LACA,wKACA,6LACA,sNACA,mLACA,2LACA,qKACA,wJACA,uKAGF,+BAAI,wHAA8F,uBAClG,+BACE,4LACA,wHACA,sIACA,oHACA,mJACA,uIACA,6QACA,yIACA,sMACA,6KAGF,6BAAI,iFAEJ,6BAAI,yGAGJ,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,yCACJ,kCACA,6BAAI,8EACJ,6BACA,+BAAI,4CAAJ,wEAEA,6BAAI,2DACJ,6BACA,2HAEA,6BAAI,oDACJ,+BAAI,yIACJ,+BACA,iQACA,gJACA,0GAGA,6BAAI,gFAGJ,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,2CACJ,kCACA,6BAAI,6EACJ,6BACA,+BAAI,4CAAJ,qEAEA,6BAAI,2DACJ,6BACA,8NAEA,6BAAI,oDACJ,+BAAI,6HACJ,+BACA,iFACA,4HACA,kJAGA,+BAAI,2HACJ,+BACA,4KACA,qIACA,mMACA,iQACA,uJACA,iKACA,gOACA,gJACA,8KACA,4NAGA,+BAAI,wHACJ,+BACA,+KACA,wGACA,qJACA,yGACA,mLACA,kLACA,6IACA,8JACA,wLACA,4LAGA,6BAAI,6IAEJ,2BCnGSC,EAnCgB,WAC7B,OACE,gCACE,cAAC,EAAD,+BACA,kCAEA,oCAAM,mBAAGpB,KAAK,2BAAR,2BAAN,0CAEN,+BAAI,uCAAJ,iDAEA,6BAAI,8BAAG,0CAAH,qdAGJ,oCAAM,mBAAGA,KAAK,wCAAR,yBAAN,8CAEA,+BAAI,uCAAJ,gFAEA,6BAAI,8BAAG,0CAAH,imBAGJ,oCAAM,mBAAGA,KAAK,6CAAR,0BAAN,wCACA,+BAAI,uCAAJ,+IACA,6BAAI,8BAAG,0CAAH,gMAGJ,oCAAM,mBAAGA,KAAK,sEAAR,2BAAN,mCAEA,+BAAI,uCAAJ,kDAEA,6BAAI,8BAAG,0CAAH,ikBCkDWqB,EA9Ee,WAC5B,OACE,gCACE,cAAC,EAAD,8BAED,0ZAEJ,2CALG,oCAOH,+BACA,mCAAK,4DAA8B,uDAA9B,oDAAL,OACA,6BAAI,mEACJ,6BAAI,oFACJ,6BAAI,kEACJ,6BAAI,sFACJ,gJAEA,2CAAa,mBAAGrB,KAAK,kEAAR,6EAAb,OACA,yCACA,kuBAEA,0oBAEA,kDACA,kTACA,0FAEA,+BACA,gFACA,0EACA,oFACA,6EACA,+EACA,uGACA,kFACA,qFACA,wDACA,wHACA,+FAIA,qDACA,8fAEA,uEACA,0rBAEA,mEAAqC,mBAAGA,KAAK,qFAAqFsB,UAAQ,EAArG,wCAArC,0HAEA,yEAA2C,mBAAGtB,KAAK,kEAAR,6EAA3C,yFAEA,4DACA,qjCAEA,4JAEA,6VAEA,kDACA,m6CAEA,0VAEA,iDACA,iJAEA,0RAEA,kYAnEG,qDAqE4C,uBArE5C,wCAuEH,0BC5BcuB,EA9Ce,WAC5B,OACE,gCACE,cAAC,EAAD,8BACF,uaACA,4CACE,+BACA,6BAAI,gEACF,+BACE,yMACA,sEAAwC,mBAAGvB,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,OACA,+OAED,6BAAI,8EACL,6BACE,0LAEF,6BAAI,yFACJ,+BACE,wJACA,qMACA,4KAEF,6BAAI,2GACJ,+BACE,gPACA,kJAEF,6BAAI,mGACJ,+BACE,0MACA,qGACA,uPAEF,6BAAI,4FACJ,+BACE,mcACA,2IAIJ,oCAAM,8EAAgD,mBAAGA,KAAK,gBAAR,6BAAhD,QAAgG,mBAAGA,KAAK,uBAAR,mCAAhG,OAAN,WCrBSwB,EApBa,WAC1B,OACE,gCACE,cAAC,EAAD,8BACD,yCACA,ySACD,qCACC,0lBACD,wCACC,m0CACD,wCACC,uoBACD,qDACC,onBACA,0HCGUA,GAjBa,WAC1B,OACE,gCACE,cAAC,EAAD,oCACF,8HACA,yVACA,+BACA,6BAAI,2FADJ,4VAGA,6BAAI,kGAHJ,qYCKWC,GAVa,WAC1B,OACE,gCACE,cAAC,EAAD,wCACF,gcAAka,IAAla,YAAgb,IAAhb,0SCuDWC,I,MA3Dc,WAC3B,OACE,gCACE,cAAC,EAAD,qCACA,iEAAmC,mBAAGT,KAAK,uEAAR,SAA+E,kEAAlH,KAA8J,4CAA9J,WAAwL,4CAAxL,8aAGA,oEAAsC,mBAAGA,KAAK,GAAR,kBAAtC,4FAEA,mbACA,wBAAOnE,UAAU,0BAA0B6E,OAAO,IAAlD,UACE,+BACE,sCACA,kDACA,4DACA,+DAEF,+BACE,6BAAI,mBAAGV,KAAK,4CAAR,mDACJ,2CACA,qCACA,wCAEF,+BACE,6BAAI,mBAAGA,KAAK,2BAAR,4BACJ,2CACA,sCACA,uCAEF,+BACE,6BAAI,mBAAGA,KAAK,kHAAR,4CACJ,2CACA,sCACA,wCAEF,+BACE,6BAAI,mBAAGA,KAAK,6EAAR,8CACJ,2CACA,sCACA,wCAEF,+BACE,6BAAI,mBAAGA,KAAK,kDAAR,oCACJ,2CACA,sCACA,wCAEF,+BACE,6BAAI,mBAAGA,KAAK,6BAAR,kCACJ,2CACA,qCACA,2CAGJ,4BClCSxC,I,MArBI,SAAC,GAAa,IAAZC,EAAW,EAAXA,MACnB,OACE,wBAAO5B,UAAU,mBAAjB,UACE,gCACE,+BACE,oBAAI6B,MAAM,MAAV,kBACA,2CAGJ,gCACGD,EAAMN,KAAI,gBAAGQ,EAAH,EAAGA,KAAMC,EAAT,EAASA,KAAT,OACT,+BACE,6BAAKD,IACL,6BAAKC,iBCFF+C,GATgB,WAC7B,OACE,qCACE,cAAC,EAAD,8BACA,cAAC,GAAD,CAAYlD,MAAOM,QCYVI,I,MAjBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQuC,EAAjB,EAAiBA,SAAUtC,EAA3B,EAA2BA,YAA3B,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EAAO,uBACR,mBAAGU,KAAM8B,GAAyBD,EAAWE,OAAO,SAASC,IAAI,sBAAjE,0BACA,cAAC,IAAD,CAAcxC,MAAO,EAArB,SAAyBD,gBC0BtB0C,GAvCU,CACvB,CACE5C,MAAO,0EACPC,OACE,+BACFuC,SAAU,sCAEZ,CACExC,MAAO,yDACPC,OACE,wGACFuC,SAAU,
wCAEZ,CACExC,MAAO,uDACPC,OACE,4BACFuC,SAAU,oCAEZ,CACExC,MAAO,wDACPC,OACE,6CACFuC,SAAU,8CAEZ,CACExC,MAAO,4DACPC,OACE,mEACFuC,SAAU,qCAEZ,CACExC,MAAO,wFACPC,OACE,kHACFuC,SAAU,uCChBCK,GAdkB,WAC/B,OAKE,cAJA,CAIA,iBACE,cAAC,EAAD,gCACA,cAAC,GAAD,CAAYxD,MAAOuD,SCOV7C,I,MAhBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQC,EAAjB,EAAiBA,YAAjB,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EACD,cAAC,IAAD,CAAcE,MAAO,EAArB,SAAyBD,gBC+BtBE,GA3CQ,CACrB,CACEJ,MAAO,iDACPC,OACE,+BAEJ,CACED,MAAO,+BACPC,OACE,6DAEJ,CACED,MAAO,+DACPC,OACE,wFAEJ,CACED,MAAO,kFACPC,OACE,6CAEJ,CACED,MAAO,6FACPC,OACE,6GAEJ,CACED,MAAO,6CACPC,OACE,kEAEJ,CACED,MAAO,wEACPC,OACE,0CAEJ,CACED,MAAO,gEACPC,OACE,kCCwFSG,GA/HQ,CACrB,CACEJ,MAAO,8DACPC,OAAQ,+BAER,CACAD,MAAO,8FACPC,OAAQ,yGAER,CACAD,MAAO,iCACPC,OAAQ,yDAER,CACAD,MAAO,sFACPC,OAAQ,mEAER,CACAD,MAAO,4EACPC,OAAQ,gBAER,CACAD,MAAO,+HACPC,OAAQ,mEAER,CACAD,MAAO,sDACPC,OAAQ,2CAER,CACAD,MAAO,gDACPC,OAAQ,yFAER,CACAD,MAAO,+EACPC,OAAQ,yFAER,CACAD,MAAO,2FACPC,OAAQ,yEAER,CACAD,MAAO,wDACPC,OAAQ,iEAER,CACAD,MAAO,sFACPC,OAAQ,8FAER,CACAD,MAAO,4CACPC,OAAQ,iBAER,CACAD,MAAO,sDACPC,OAAQ,4BAER,CACAD,MAAO,+DACPC,OAAQ,2DAER,CACAD,MAAO,yEACPC,OAAQ,qEAER,CACAD,MAAO,4FACPC,OAAQ,uHAER,CACAD,MAAO,6CACPC,OAAQ,8DAER,CACAD,MAAO,mDACPC,OAAQ,wEAER,CACAD,MAAO,sEACPC,OAAQ,0FAER,CACAD,MAAO,qGACPC,OAAQ,yCAER,CACAD,MAAO,mGACPC,OAAQ,4EAER,CACAD,MAAO,uDACPC,OAAQ,wDAER,CACAD,MAAO,iFACPC,OAAQ,qFAER,CACAD,MAAO,6DACPC,OAAQ,0CAER,CACAD,MAAO,iFACPC,OAAQ,4EAER,CACAD,MAAO,oDACPC,OAAQ,qFAER,CACAD,MAAO,+CACPC,OAAQ,qLAER,CACAD,MAAO,+EACPC,OAAQ,2DAER,CACAD,MAAO,yEACPC,OAAQ,+DAER,CACAD,MAAO,kEACPC,OAAQ,2BCnHGG,GARQ,CACrB,CACEJ,MAAO,+BACPC,OACE,8DCoBS6C,GAlBgB,WAC7B,OAKE,cAJA,CAIA,iBACE,cAAC,EAAD,8BACA,6BAAI,mDACJ,cAAC,GAAD,CAAYzD,MAAO0D,KACnB,6BAAI,uCACJ,cAAC,GAAD,CAAY1D,MAAOiB,KACnB,6BAAI,yCACJ,cAAC,GAAD,CAAYjB,MAAOkB,SCOVC,I,MAvBC,SAAC,GAAe,IAAbnB,EAAY,EAAZA,MACjB,OACE,uBAAO5B,UAAU,gBAAjB,SACG4B,EAAMN,KAAI,gBAAG0B,EAAH,EAAGA,SAAUC,EAAb,EAAaA,QAAb,OACT,6BACE,+BACE,oBAAIjD,UAAU,oBAAd,SAAmCgD,IAClCC,EAAQ3B,KAAI,gBAAGQ,EAAH,EAAGA,KAAMoB,EAAT,EAASA,KAAMC,EAAf,EAAeA,QAAf,OACX,sBAAKnD,UAAU,kBAAf,UACE,mBAAGA,UAAU,wBAAwBkD,KAAMA,EAA3C,SACGpB,IAEH,uBACA,sBAAM9B,UAAU,mBAAhB,SAAoCmD,sBCDrCoC,GATqB,WAClC,OACE,gCACE,cAAC,EAAD,mCACA,cAAC,GAAD,CAAS3D,MAAOyB,QCmEPmC,GA1EqB,WAClC,OACE,gCACE,cAAC,EAAD,oCACA,qPACA,4CACA,+BACA,6BAAI,gEACF,+BACE,yMACA,6JACA,sEAAwC,mBAAGtC,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,UAED,6BAAI,sGACL,+BACE,sKACA,oPAEF,6BAAI,mGACJ,+BACE,qPACA,4OAEF,6BAAI,2GACJ,+BACE,sRACA,qPACA,ihBACA,sLAGJ,6CACA,+BACE,+BAAI,wCAAJ,6JACA,+BAAI,4CAAJ,8FACA,+BACE,iRACA,kaACA,oTACA,wVAEF,+BAAI,8CAAJ,iJACA,+BACE,8EACA,8EACA,6EACA,uDACA,0EACA,0EACA,uEACA,oDACA,qDACA,wDAEJ,+BAAI,iDAAJ,2GACA,+BACI,gLACA,gQACA,qQACA,8QACA,iNAGJ,2CACA,8BAAG,iDAAH,8eACA,8BAAG,wDAAH,q0BACA,8BAAG,kDAAH,ouBACA,8BAAG,uEAAH,gkBAEA,oCAAM,mFAAqD,mBAAGA,KAAK,gBAAR,6BAArD,OAAN,WCrDSuC,GAde,WAC5B,OACE,gCACE,cAAC,EAAD,6BACA,gCACA,+BACA,6BAAI,cAAC,GAAD,CAAS7D,MAAOA,MACpB,6BAAI,cAAC,GAAD,CAASA,MAAO8B,eCuBXgC,I,MA/BU,WACvB,OACE,gCACA,cAAC,EAAD,uBACF,mCACA,oBAAI1F,UAAU,+BAAd,2BACA,uBAAOA,UAAU,qBAAjB,SACE,+BACE,6BAAI,qBAAKC,IAAK4D,EAAQ,IAA8BC,QAASjC,MAAM,UAEnE,6BAAI,qBAAK5B,IAAK4D,EAAQ,IAAiCC,QAASjC,MAAM,eAG1E,oBAAI7B,UAAU,iCAAd,6BACA,uBAAOA,UAAU,qBAAjB,SACE,6BAEE,6BAAI,qBAAKC,IAAK4D,EAAQ,IAAiCC,QAASjC,MAAM,cAG1E,oBAAI7B,UAAU,iCAAd,6BACA,uBAAOA,UAAU,qBAAjB,SACE,6BACA,6BAAI,qBAAKC,IAAK4D,EAAQ,IAAiDC,QAASjC,MAAM,sBCC3E8D,GAzBW,WACxB,OACE,gCACE,cAAC,EAAD,0BADF,gDAE+C,uBAF/C,mDAGkD,uBAHlD,mCAIkC,uBAJlC,wCAKuC,uBAL
vC,yCAMwC,uBANxC,0CAOyC,uBAPzC,0CAQyC,uBARzC,iCASgC,uBAThC,wCAUuC,uBAVvC,2CAW0C,uBAX1C,4CAY2C,uBAZ3C,oCAamC,uBAbnC,gDAc+C,uBAd/C,4CAe2C,uBAf3C,+CAgB8C,uBAhB9C,6CAiB4C,uBAjB5C,oCCQWC,GAVkB,WAC/B,OACE,gCACE,cAAC,EAAD,gCADF,UCyFWC,I,MAxFO,WACpB,OACE,gCACE,cAAC,EAAD,yDACA,sBAAK3B,MAAM,2BAAX,UACE,krBAYA,0rBAYA,+uBAWqC,sIApCvC,4DAwCE,+BACA,gFACA,0EACA,oFACA,6EACA,+EACA,uGAGA,kFACA,qFACA,wDACA,uHAEgC,OAEhC,yFACA,uEAEA,2CAAa,mBAAGhB,KAAK,kEAAR,6EAAb,UAEF,uBACA,gCACA,2CACA,+BACA,kHAAoF,mBAAGA,KAAK,iaAAR,sBACpF,8EAAgD,8DAAhD,0CAA2H,mBAAGA,KAAK,+DAAR,sBAC3H,kFAAoD,iDAApD,0BAAkG,6DAAlG,yEACA,oHACA,+JACA,wFAA0D,mBAAGA,KAAK,sDAAR,uCAA1D,QACA,8EAAgD,mBAAGA,KAAK,4CAAR,iCAAhD,QACA,0GAA4E,mBAAGiB,KAAK,kDAAR,kBAA5E,eAGA,uBACA,sBAAKD,MAAM,2BAAX,UACA,iDACA,cAAC,GAAD,CAAYtC,MAAOM,YCmJV4D,GAxOY,WACzB,OACE,gCACE,cAAC,EAAD,0BACA,iEAAmC,8DAAnC,0CAA8G,mBAAG5C,KAAK,+DAAR,sBAC9G,6BAAI,sBAAMmB,MAAM,WAAZ,yCACJ,kCACA,6BAAI,8DAKJ,6BAAI,uDAKJ,+BAAI,sEACJ,6BACA,yFAGA,6BAAI,wDACJ,+BACA,yGACA,yIACA,4EACA,iIACA,uHACA,0KACA,iIACA,2FACA,oGAEA,6BAAI,sDACJ,+BACA,0FACA,wEACA,2GAEA,6BAAI,yEAyCJ,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,2CACJ,kCACA,6BAAI,gEACJ,6BAAI,0EACJ,6BACA,4EAEA,6BAAI,wDAIJ,6BAAI,2DACJ,+BACA,wFACA,2HACA,uIACA,oGACA,2HACA,qHACA,8GACA,6FACA,2EACA,+GAEA,6BAAI,sDACJ,6BAAI,yEACJ,6BACA,+EAEA,6BAAI,gEACJ,6BAAI,0EACJ,6BAAI,uDACJ,+BACA,4HACA,uIACA,6GAEA,6BAAI,6EAEJ,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,0CACJ,kCACA,6BAAI,gEACJ,6BAAI,iEACJ,6BAAI,wDAIJ,6BAAI,4DACJ,+BACA,4GACA,sHACA,yIACA,0FACA,gGACA,mHACA,kJACA,qHACA,6GACA,uGAKA,6BAAI,sDACJ,6BAAI,qEAsDJ,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,wCACJ,kCACA,6BAAI,gEACJ,6BAAI,yDACJ,+BACA,mHACA,2FAEA,6BAAI,wDACJ,6BAAI,2DACJ,+BACA,+IACA,mGACA,6HACA,yGACA,6HACA,gGACA,2FACA,8HACA,yIACA,6FAEA,6BAAI,wDACJ,6BAAI,wDAEJ,2BC1MS0B,GAzBgB,WAC3B,OACI,gCACI,cAAC,EAAD,+BACA,kCAEA,oCAAM,mBAAG7C,KAAK,yCAAR,4BAAN,8BACA,+BAAI,uCAAJ,mDACA,6BAAI,8BAAG,0CAAH,wtBAGJ,oCAAM,mBAAGA,KAAK,8CAAR,2BAAN,4EACA,+BAAI,uCAAJ,0DACA,6BAAI,8BAAG,0CAAH,koBAGJ,oCAAM,mBAAGA,KAAK,gDAAR,4BAAN,0DACA,+BAAI,uCAAJ,+CACA,6BAAI,8BAAG,0CAAH,goBCiED8C,GAlFe,WAC5B,OACE,gCACE,cAAC,EAAD,8BAED,0ZAEJ,2CALG,oCAOH,+BACA,mCAAK,yGAAL,OACA,+DACA,iFACA,+DACA,mFACA,6IACA,6BAAI,6FAEJ,2CAAa,mBAAG9C,KAAK,kEAAR,6EAAb,OACA,yCACA,kuBAEA,0oBAEA,kDACA,kTACA,0FAEA,+BACA,gFACA,0EACA,oFACA,6EACA,+EACA,uGACA,kFACA,qFACA,wDACA,wHACA,yFACA,0EAIA,qDACA,8fAEA,uEACA,0rBAEA,mEAAqC,mBAAGA,KAAK,uFAAuFsB,UAAQ,EAAvG,wCAArC,0HAEA,yEAA2C,mBAAGtB,KAAK,kEAAR,6EAA3C,sFAEA,4DACA,qjCAEA,4JAEA,6VAEA,kDACA,m6CAEA,0VAEA,iDACA,iJAEA,0RAEA,kYAEA,sFAAwD,uBAAxD,+BAGA,qEAAuC,uBAAvC,mCC1Bc+C,GAlDiB,WAC9B,OACE,gCACE,cAAC,EAAD,uCAED,4iCACJ,sDAEA,0gBAAke,mBAAG/C,KAAK,wCAAR,kBAAle,2NAEC,8BAAG,qDAAH,4QAEA,gHAAkF,mBAAGA,KAAK,uCAAR,2CAAlF,QACD,+BACA,oGAAsE,mBAAGA,KAAK,6CAAR,kBAAtE,mGACC,+BACA,yDACA,uHACA,iFACA,iEACA,sEAND,wMASA,+PACC,+BACA,uDACA,sEACA,qDACA,oEAIA,8CACA,yJACA,+BACD,iJACA,kJACA,8VAGA,2CACA,+BACA,qEACA,uEACA,8ECCcgD,GA9Ce,WAC5B,OACE,gCACE,cAAC,EAAD,8BACF,uaACA,4CACE,+BACA,6BAAI,gEACF,+BACE,yMACA,sEAAwC,mBAAGhD,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,OACA,+OAED,6BAAI,6EACL,6BACE,0LAEF,6BAAI,wFACJ,+BACE,wJACA,qMACA,4KAEF,6BAAI,yGACJ,+BACE,gPACA,kJAEF,6BAAI,oGACJ,+BACE,0MACA,qGACA,uPAEF,6BAAI,4FACJ,+BACE,mcACA,2IAIJ,oCAAM,8EAAgD,mBAAGA,KAAK,gBAAR,6BAAhD,QAAgG,mBAAGA,KAAK,uBAAR,mCAAhG,OAAN,WCrBSiD,GApBa,WAC1B,OACE,gCACE,cAAC,EAAD,8BACD,yCACA,ySACD,qCACC,0lBACD,wCACC,m0CACD,wCACC,uoBACD,qDACC,onBACA,0HCGUA,GAjBa,WAC1B,OACE,gCACE,cAAC,EAAD,oCACF,8HACA,yVACA,+BACA,6BAAI,2FADJ,4VAGA,6BAAI,kGAHJ,qYCKWC,GAVa,WAC1B,OACE,gCACE,cAAC,EAAD,wCACF,yCC6BWC,I,MAjCc,WAC3B,OACE,gCACE,cAAC,EAAD,qCACA,uCACA,iEAAmC,m
BAAGlC,KAAK,wzBAAR,SAAg0B,4DAAn2B,KAAy4B,4CAAz4B,gBAAq6B,4CAAr6B,kGACA,8CACA,oEAAsC,mBAAGA,KAAK,kDAAR,kBAAtC,qGACA,uDACA,wKAA0I,mBAAGA,KAAK,+BAAR,kBAA1I,QACA,+DACA,iVACA,yLACA,0LACA,qBAAKlE,IAAK4D,EAAQ,IAAgCC,QAASjC,MAAM,MAAM1B,IAAI,kCAC3E,mQACA,qBAAKF,IAAK4D,EAAQ,IAAgCC,QAASjC,MAAM,MAAM1B,IAAI,kCAC3E,sIACA,qBAAKF,IAAK4D,EAAQ,IAAgCC,QAASjC,MAAM,MAAM1B,IAAI,kCAC3E,sDACA,4GACA,+BACE,6BAAI,mBAAG+C,KAAK,6DAAR,4CACJ,6BAAI,mBAAGA,KAAK,8BAAR,6BACJ,6BAAI,mBAAGA,KAAK,kDAAR,qCACJ,6BAAI,mBAAGA,KAAK,4CAAR,0CACJ,6BAAI,mBAAGA,KAAK,oCAAR,uCACJ,6BAAI,mBAAGA,KAAK,qCAAR,6CCRGoD,GApByB,WACtC,OACE,gCACE,cAAC,EAAD,wCACA,yDACA,+BACA,6BAAI,oFACJ,6BAAI,2GAEJ,2DACA,6BACE,+BAAI,4HAAJ,SAEF,sDACA,8BAAG,0IAAH,OACA,8BAAG,sDAAwB,mBAAGpD,KAAI,2BAAP,+BAAxB,4FAAH,W,4CCfAqD,GAAW,SAAC,GAAD,IAAGC,EAAH,EAAGA,MAAOC,EAAV,EAAUA,KAAMC,EAAhB,EAAgBA,KAAMC,EAAtB,EAAsBA,UAAWC,EAAjC,EAAiCA,SAAUC,EAA3C,EAA2CA,gBAA3C,OACf,gCACE,6BAAKL,IACL,oBAAGM,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,mBAA0DP,KAC1D,mBAAGK,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,gCACA,mBAAGF,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,SAAmD,mBAAG9D,KAAMwD,EAAT,SAAgBA,MACnE,oBAAGI,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,yBAAgEL,KAChE,oBAAGG,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,uBAA8DJ,KAC9D,mBAAGE,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,SAAmD,mBAAG9D,KAAM2D,EAAT,0CAKjDI,GAAS,CACb,CACE1E,MAAO,0EACP2E,aAAc,gEAEhB,CACE3E,MAAO,0GACP2E,aAAc,uEAEhB,CACE3E,MAAO,6CACP2E,aAAc,8DAEhB,CACE3E,MAAO,kGACP2E,aAAc,sEAEhB,CACE3E,MAAO,2IACP2E,aAAc,4EAEhB,CACE3E,MAAO,kGACP2E,aAAc,mIAEhB,CACE3E,MAAO,kEACP2E,aAAc,yDAEhB,CACE3E,MAAO,yDACP2E,aAAc,wDAEhB,CACE3E,MAAO,wGACP2E,aAAc,uEAEhB,CACE3E,MAAO,qEACP2E,aAAc,qGAEhB,CACE3E,MAAO,4FACP2E,aAAc,mIAEhB,CACE3E,MAAO,sFACP2E,aAAc,0DAEhB,CACE3E,MAAO,+EACP2E,aAAc,gEAEhB,CACE3E,MAAO,8DACP2E,aAAc,gEAEhB,CACE3E,MAAO,4CACP2E,aAAc,mFAEhB,CACE3E,MAAO,6EACP2E,aAAc,gEAEhB,CACE3E,MAAO,6EACP2E,aAAc,0EAEhB,CACE3E,MAAO,uFACP2E,aAAc,gFAEhB,CACE3E,MAAO,0GACP2E,aAAc,qEAEhB,CACE3E,MAAO,2DACP2E,aAAc,wEAEhB,CACE3E,MAAO,iEACP2E,aAAc,qEAEhB,CACE3E,MAAO,oFACP2E,aAAc,2HAEhB,CACE3E,MAAO,mHACP2E,aAAc,8EAEhB,CACE3E,MAAO,sFACP2E,aAAc,oEAEhB,CACE3E,MAAO,qEACP2E,aAAc,8EAEhB,CACE3E,MAAO,gHACP2E,aAAc,qEAEhB,CACE3E,MAAO,8FACP2E,aAAc,qDAEhB,CACE3E,MAAO,0EACP2E,aAAc,wDAEhB,CACE3E,MAAO,8FACP2E,aAAc,sDAEhB,CACE3E,MAAO,iEACP2E,aAAc,qEAEhB,CACE3E,MAAO,4DACP2E,aAAc,sEAEhB,CACE3E,MAAO,+FACP2E,aAAc,4EAEhB,CACE3E,MAAO,0GACP2E,aAAc,uDAEhB,CACE3E,MAAO,2DACP2E,aAAc,sFAIZC,G,oDACJ,WAAYC,GAAQ,IAAD,+BACjB,cAAMA,IACDC,MAAQ,CACXC,iBAAiB,EACjBC,SAAU,IAEZ,EAAKC,aAAe,EAAKA,aAAaC,KAAlB,iBACpB,EAAKC,aAAe,EAAKA,aAAaD,KAAlB,iBAPH,E,iDAUnB,SAAaE,GACXA,EAAMC,iBAEkB,oBACpBC,KAAKR,MAAME,SACbM,KAAKC,SAAS,CAAER,iBAAiB,IAEjCS,MAAM,wB,0BAIV,SAAaJ,GACXE,KAAKC,SAAS,CAAEP,SAAUI,EAAM1C,OAAO+C,U,oBAGzC,WACE,OAAIH,KAAKR,MAAMC,gBAEX,gCACE,yGAA2E,mBAAGpE,KAAK,wCAAR,sBAA3E,sDACA,cAAC,GAAD,CACEsD,MAAM,uBACNC,KAAK,mEACLC,KAAK,6EACLC,UAAU,gBACVC,SAAS,SACTC,gBAAgB,wCAEjB,+LACA,4NACA,oEACA,kCACH,gCACE,+BACE,6CACA,oDAGJ,gCACGI,GAAO3F,KAAI,SAAC2G,GAAD,OACV,+BACE,6BAAKA,EAAM1F,QACX,6BAAI,mBAAGW,KAAM+E,EAAMf,aAAcjC,OAAO,SAASC,IAAI,sBAAjD,0BAFG+C,EAAM1F,kBAWnB,uBAAM2F,SAAUL,KAAKL,aAArB,UACE,iFACA,8CAEE,uBAAOW,KAAK,WAAWH,MAAOH,KAAKR,MAAME,SAAUa,SAAUP,KAAKH,kBAEpE,wBAAQS,KAAK,SAAb,2B,GApEcE,IAAMC,WAsFfC,GATY,WACzB,OACA,gCACE,cAAC,EAAD,8BACA,cAAC,GAAD,QChMWC,GAzCkB,WAC/B,OACE,gCACE,cAAC,EAAD,gCAEE,wDAHJ,8CAI+C,mBAAGtF,KAAK,wCAAR,8BAJ/C,IAKI,+BACI,iFAAmD,mBAAGA,KAAK,wCAAR,qCAAnD,kCACA,4FACA,gHAEJ,gEAVJ,sDAWuD,mBAAGA,KAAK,wCAAR,oBAXvD,mBAWiI,mBAAGA,KAAK,wCAAR,kBAXjI,KAYI,+BACI,yGACA,iEACA,oFACA,8FAEJ,mDAlBJ,sDAoBI,+BACI,gGACA,yDAA2B,mBAAGA,KAAK,wCAAR,oCAA3B,sBACA,iFACA,uKACA,iMAC0C,m
BAAGA,KAAK,sHAAR,wBAD1C,OAGA,+BACI,sDADJ,iBACmD,mBAAGA,KAAK,mDAAR,gCADnD,4BAC6J,mBAAGA,KAAK,sHAAR,uBAD7J,2EAKJ,2KCrBOuF,GAdmB,WAChC,OACE,gCACE,cAAC,EAAD,iCADF,sCAEqC,uBAFrC,mDAG4C,uBAH5C,2CAI0C,uBAJ1C,6CAK4C,uBAL5C,uCAMsC,uBANtC,8CCmBW9G,I,MArBI,SAAC,GAAe,IAAbC,EAAY,EAAZA,MAClB,OACI,wBAAO5B,UAAU,mBAAjB,UACI,gCACI,+BACI,oBAAI6B,MAAM,MAAV,kBACA,2CAGR,gCACKD,EAAMN,KAAI,gBAAGQ,EAAH,EAAGA,KAAMC,EAAT,EAASA,KAAT,OACP,+BACI,6BAAKD,IACL,6BAAKC,iBCFd2G,GATgB,WAC7B,OACE,qCACE,cAAC,EAAD,8BACA,cAAC,GAAD,CAAY9G,MAAOO,QCYVG,I,MAjBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQuC,EAAjB,EAAiBA,SAAUtC,EAA3B,EAA2BA,YAA3B,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EAAO,uBACR,mBAAGU,KAAM8B,GAAyBD,EAAWE,OAAO,SAASC,IAAI,sBAAjE,0BACA,cAAC,IAAD,CAAcxC,MAAO,EAArB,SAAyBD,gBCMtB2C,GAdkB,WAC/B,OAKE,cAJA,CAIA,iBACE,cAAC,EAAD,gCACA,cAAC,GAAD,CAAYxD,MAAOuD,SCOV7C,I,MAhBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQC,EAAjB,EAAiBA,YAAjB,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EACD,cAAC,IAAD,CAAcE,MAAO,EAArB,SAAyBD,gBCYtB4C,GAlBgB,WAC7B,OAKE,cAJA,CAIA,iBACE,cAAC,EAAD,8BACA,6BAAI,mDACJ,cAAC,GAAD,CAAYzD,MAAO0D,KACnB,6BAAI,uCACJ,cAAC,GAAD,CAAY1D,MAAOiB,KACnB,6BAAI,yCACJ,cAAC,GAAD,CAAYjB,MAAOkB,SCOVC,I,MAvBC,SAAC,GAAe,IAAbnB,EAAY,EAAZA,MACjB,OACE,uBAAO5B,UAAU,gBAAjB,SACG4B,EAAMN,KAAI,gBAAG0B,EAAH,EAAGA,SAAUC,EAAb,EAAaA,QAAb,OACT,6BACE,+BACE,oBAAIjD,UAAU,oBAAd,SAAmCgD,IAClCC,EAAQ3B,KAAI,gBAAGQ,EAAH,EAAGA,KAAMoB,EAAT,EAASA,KAAMC,EAAf,EAAeA,QAAf,OACX,sBAAKnD,UAAU,kBAAf,UACE,mBAAGA,UAAU,wBAAwBkD,KAAMA,EAA3C,SACGpB,IAEH,uBACA,sBAAM9B,UAAU,mBAAhB,SAAoCmD,sBCDrCwF,GATqB,WAClC,OACE,gCACE,cAAC,EAAD,mCACA,cAAC,GAAD,CAAS/G,MAAO0B,QCmEPsF,GA1EqB,WAClC,OACE,gCACE,cAAC,EAAD,oCACA,qPACA,4CACA,+BACA,6BAAI,gEACF,+BACE,yMACA,6JACA,sEAAwC,mBAAG1F,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,UAED,6BAAI,sGACL,+BACE,sKACA,oPAEF,6BAAI,mGACJ,+BACE,qPACA,4OAEF,6BAAI,2GACJ,+BACE,sRACA,qPACA,ihBACA,sLAGJ,6CACA,+BACE,+BAAI,wCAAJ,6JACA,+BAAI,4CAAJ,8FACA,+BACE,iRACA,kaACA,oTACA,wVAEF,+BAAI,8CAAJ,iJACA,+BACE,8EACA,8EACA,6EACA,uDACA,0EACA,0EACA,uEACA,oDACA,qDACA,wDAEJ,+BAAI,iDAAJ,2GACA,+BACI,gLACA,gQACA,qQACA,8QACA,iNAGJ,2CACA,8BAAG,iDAAH,8eACA,8BAAG,wDAAH,q0BACA,8BAAG,kDAAH,ouBACA,8BAAG,uEAAH,gkBAEA,oCAAM,mFAAqD,mBAAGA,KAAK,gBAAR,6BAArD,OAAN,WCrDSuC,GAde,WAC5B,OACE,gCACE,cAAC,EAAD,6BACA,gCACA,+BACA,6BAAI,cAAC,GAAD,CAAS7D,MAAOA,MACpB,6BAAI,cAAC,GAAD,CAASA,MAAO8B,eCqBXmF,I,MA7BU,WACvB,OACE,gCACA,cAAC,EAAD,uBACF,mCACA,oBAAI7I,UAAU,+BAAd,2BACA,uBAAOA,UAAU,qBAAjB,SACE,6BACA,6BAAI,qBAAKC,IAAK4D,EAAQ,IAA2CC,QAASjC,MAAM,cAGlF,oBAAI7B,UAAU,iCAAd,6BACA,uBAAOA,UAAU,qBAAjB,SACE,6BAEE,6BAAI,qBAAKC,IAAK4D,EAAQ,IAAkCC,QAASjC,MAAM,cAG3E,oBAAI7B,UAAU,iCAAd,6BACA,uBAAOA,UAAU,qBAAjB,SACE,6BACA,6BAAI,qBAAKC,IAAK4D,EAAQ,IAAuCC,QAASjC,MAAM,sBCbjEiH,GATW,WACxB,OACE,gCACE,cAAC,EAAD,0BACA,0CCMSC,GAVkB,WAC/B,OACE,gCACE,cAAC,EAAD,gCADF,UCwCWC,I,MAvCO,WACpB,OACE,gCACE,cAAC,EAAD,yDACA,sBAAK9E,MAAM,2BAAX,UACE,krBAGA,8uBAEE,4FAA8D,mBAAGhB,KAAK,uEAAR,+BAA9D,8DANJ,4DASE,+BACE,gFACA,0EACA,oFACA,6EACA,+EACA,uGACA,kFACA,qFACA,wDACA,uHACA,yFACA,gEACA,kFAEF,8SAEF,uBACA,sBAAKgB,MAAM,2BAAX,UACA,iDACA,cAAC,GAAD,CAAYtC,MAAOO,YCmHV8G,GAvJY,WACvB,OACI,gCACI,cAAC,EAAD,0BACA,6BAAI,sBAAM5E,MAAM,WAAZ,uCACJ,kCACI,6BAAI,oFAEJ,6BAAI,yDAEJ,+BAAI,iFACA,6BACI,8GAGR,6BAAI,6DAEJ,6BAAI,iFACJ,+BACI,uEACA,kHACA,+GACA,8IACA,4IACA,mIAEJ,6BAAI,8DAEJ,6BAAI,0DACJ,+BACI,sHACA,wHACA,0GACA,yHACA,gHACA,mIACA,yGACA,kIACA,gJACA,mFACA,sFACA,0IACA,uHACA,uGACA,wIAEJ,+B
AAI,gFACA,6BACI,yEAIZ,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,wCACJ,kCACI,6BAAI,oEAEJ,6BAAI,wDACJ,+BACI,yGACA,8GAEJ,6BAAI,8DAEJ,6BAAI,2DACJ,+BACI,wHACA,0GACA,4GACA,wFACA,oGACA,4FACA,4HACA,qHACA,+GACA,2FACA,yGACA,0GACA,mIACA,gHACA,2JAEJ,6BAAI,6DAEJ,+BAAI,yEACA,6BACI,yFAGR,6BAAI,iEAEJ,6BAAI,kDAEJ,6BAAI,2EAGR,uBACA,6BAAI,sBAAMA,MAAM,WAAZ,0CACJ,kCACI,6BAAI,oEAEJ,+BAAI,0EACA,6BACI,iGAGR,6BAAI,qEAEJ,6BAAI,0DACJ,+BACI,qHACA,+GACA,+GACA,kJAEJ,6BAAI,6DAEJ,6BAAI,yDACJ,+BACI,oHACA,0JACA,4HAEJ,6BAAI,8DAEJ,6BAAI,4DACJ,+BACI,iHACA,+HACA,gJACA,6HACA,kKACA,sHACA,2IACA,+HACA,qGACA,yFACA,6JACA,4HACA,6HACA,mFACA,2JAEJ,6BAAI,4DAGR,2BCzHG0B,GAzBgB,WAC3B,OACI,gCACI,cAAC,EAAD,+BACA,kCAEA,oCAAM,mBAAG7C,KAAK,yCAAR,4BAAN,8BACA,+BAAI,uCAAJ,mDACA,6BAAI,8BAAG,0CAAH,wtBAGJ,oCAAM,mBAAGA,KAAK,8CAAR,2BAAN,4EACA,+BAAI,uCAAJ,0DACA,6BAAI,8BAAG,0CAAH,koBAGJ,oCAAM,mBAAGA,KAAK,gDAAR,4BAAN,0DACA,+BAAI,uCAAJ,+CACA,6BAAI,8BAAG,0CAAH,goBCiEDgG,GAlFe,WAC5B,OACE,gCACE,cAAC,EAAD,8BAED,0ZAEJ,2CALG,oCAOH,+BACA,mCAAK,yGAAL,OACA,+DACA,iFACA,+DACA,mFACA,mFAEA,2CAAa,mBAAGhG,KAAK,kEAAR,6EAAb,OAEA,yCACA,kuBAEA,0oBAEA,kDACA,kTACA,0FAEA,+BACA,gFACA,0EACA,oFACA,6EACA,+EACA,uGACA,kFACA,qFACA,wDACA,wHACA,yFACA,oEACA,sEAIA,qDACA,8fAEA,uEACA,0rBAEA,mEAAqC,mBAAGA,KAAK,uFAAuFsB,UAAQ,EAAvG,wCAArC,0HAGA,4DACA,qjCAEA,4JAEA,6VAEA,kDACA,m6CAEA,0VAEA,iDACA,iJAEA,0RAEA,kYAEA,uEAAyC,uBAAzC,+BAGA,6DAA+B,uBAA/B,mCC/Bc2E,GA9Ce,WAC5B,OACE,gCACE,cAAC,EAAD,8BACF,uaACA,4CACE,+BACA,6BAAI,gEACF,+BACE,yMACA,sEAAwC,mBAAGjG,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,OACA,+OAED,6BAAI,6EACL,6BACE,0LAEF,6BAAI,wFACJ,+BACE,wJACA,qMACA,4KAEF,6BAAI,yGACJ,+BACE,gPACA,kJAEF,6BAAI,oGACJ,+BACE,0MACA,qGACA,uPAEF,6BAAI,4FACJ,+BACE,mcACA,2IAIJ,oCAAM,8EAAgD,mBAAGA,KAAK,gBAAR,6BAAhD,QAAgG,mBAAGA,KAAK,uBAAR,mCAAhG,OAAN,WCrBSiD,GApBa,WAC1B,OACE,gCACE,cAAC,EAAD,8BACD,yCACA,ySACD,qCACC,0lBACD,wCACC,m0CACD,wCACC,uoBACD,qDACC,onBACA,0HCGUiD,GAjBa,WAC1B,OACE,gCACE,cAAC,EAAD,oCACF,8HACA,yVACA,+BACA,6BAAI,2FADJ,4VAGA,6BAAI,kGAHJ,qYCKWhD,GAVa,WAC1B,OACE,gCACE,cAAC,EAAD,wCACF,yCCQWiD,I,MAZc,WAC3B,OACE,gCACE,cAAC,EAAD,qCACA,uCACA,4DAA8B,mBAAGnG,KAAK,uEAAR,+BAA9B,uJACA,8CACA,iEAAmC,mBAAGA,KAAK,uCAAR,kBAAnC,8NCYSoD,GApByB,WACtC,OACE,gCACE,cAAC,EAAD,wCACA,yDACA,+BACA,6BAAI,oFACJ,6BAAI,2GAEJ,2DACA,6BACE,+BAAI,4HAAJ,SAEF,sDACA,8BAAG,0IAAH,OACA,8BAAG,sDAAwB,mBAAGpD,KAAI,2BAAP,+BAAxB,4FAAH,WCfAqD,GAAW,SAAC,GAAD,IAAGC,EAAH,EAAGA,MAAOC,EAAV,EAAUA,KAAMC,EAAhB,EAAgBA,KAAMC,EAAtB,EAAsBA,UAAWC,EAAjC,EAAiCA,SAAUC,EAA3C,EAA2CA,gBAA3C,OACf,gCACE,6BAAKL,IACL,oBAAGM,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,mBAA0DP,KAC1D,mBAAGK,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,gCACA,mBAAGF,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,SAAmD,mBAAG9D,KAAMwD,EAAT,SAAgBA,MACnE,oBAAGI,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,yBAAgEL,KAChE,oBAAGG,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,uBAA8DJ,KAC9D,mBAAGE,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,SAAmD,mBAAG9D,KAAM2D,EAAT,0CAKjDI,GAAS,CACb,CACE1E,MAAO,0EACP2E,aAAc,gEAEhB,CACE3E,MAAO,0GACP2E,aAAc,uEAEhB,CACE3E,MAAO,6CACP2E,aAAc,8DAEhB,CACE3E,MAAO,kGACP2E,aAAc,sEAEhB,CACE3E,MAAO,2IACP2E,aAAc,4EAEhB,CACE3E,MAAO,kGACP2E,aAAc,mIAEhB,CACE3E,MAAO,kEACP2E,aAAc,yDAEhB,CACE3E,MAAO,yDACP2E,aAAc,wDAEhB,CACE3E,MAAO,wGACP2E,aAAc,uEAEhB,CACE3E,MAAO,qEACP2E,aAAc,qGAEhB,CACE3E,MAAO,4FACP2E,aAAc,mIAEhB,CACE3E,MAAO,sFACP2E,aAAc,0DAEhB,CACE3E,MAAO,+EACP2E,aAAc,gEAEhB,CACE3E,MAAO,8DACP2E,aAAc,gEAEhB,CACE3E,MAAO,4CACP2E,aAAc,mFAEhB,CACE3E,MAAO,6EACP2E,aAAc,gEAEhB,CACE3E,MAAO,6EACP2E,aAAc,0EAEhB,CACE3E,MAAO,uFACP2E,aAAc,gFAEhB,CACE3E,MAAO,0GACP2E,aAAc,qEAEhB,CACE3E,MAAO,2DACP2E,aAAc,wEAEhB,CACE3E,MAAO,iEACP2E,aAAc,qEAEhB,CACE3E,MAAO,oFACP2E,aAAc,2HAEhB,CACE3E,
MAAO,mHACP2E,aAAc,8EAEhB,CACE3E,MAAO,sFACP2E,aAAc,oEAEhB,CACE3E,MAAO,qEACP2E,aAAc,8EAEhB,CACE3E,MAAO,gHACP2E,aAAc,qEAEhB,CACE3E,MAAO,8FACP2E,aAAc,qDAEhB,CACE3E,MAAO,0EACP2E,aAAc,wDAEhB,CACE3E,MAAO,8FACP2E,aAAc,sDAEhB,CACE3E,MAAO,iEACP2E,aAAc,qEAEhB,CACE3E,MAAO,4DACP2E,aAAc,sEAEhB,CACE3E,MAAO,+FACP2E,aAAc,4EAEhB,CACE3E,MAAO,0GACP2E,aAAc,uDAEhB,CACE3E,MAAO,2DACP2E,aAAc,sFAIZC,G,oDACJ,WAAYC,GAAQ,IAAD,+BACjB,cAAMA,IACDC,MAAQ,CACXC,iBAAiB,EACjBC,SAAU,IAEZ,EAAKC,aAAe,EAAKA,aAAaC,KAAlB,iBACpB,EAAKC,aAAe,EAAKA,aAAaD,KAAlB,iBAPH,E,iDAUnB,SAAaE,GACXA,EAAMC,iBAEkB,oBACpBC,KAAKR,MAAME,SACbM,KAAKC,SAAS,CAAER,iBAAiB,IAEjCS,MAAM,wB,0BAIV,SAAaJ,GACXE,KAAKC,SAAS,CAAEP,SAAUI,EAAM1C,OAAO+C,U,oBAGzC,WACE,OAAIH,KAAKR,MAAMC,gBAEX,gCACE,yGAA2E,mBAAGpE,KAAK,wCAAR,sBAA3E,sDACA,cAAC,GAAD,CACEsD,MAAM,uBACNC,KAAK,mEACLC,KAAK,6EACLC,UAAU,gBACVC,SAAS,SACTC,gBAAgB,wCAEjB,+LACA,4NACA,oEACA,kCACH,gCACE,+BACE,6CACA,oDAGJ,gCACGI,GAAO3F,KAAI,SAAC2G,GAAD,OACV,+BACE,6BAAKA,EAAM1F,QACX,6BAAI,mBAAGW,KAAM+E,EAAMf,aAAcjC,OAAO,SAASC,IAAI,sBAAjD,0BAFG+C,EAAM1F,kBAWnB,uBAAM2F,SAAUL,KAAKL,aAArB,UACE,iFACA,8CAEE,uBAAOW,KAAK,WAAWH,MAAOH,KAAKR,MAAME,SAAUa,SAAUP,KAAKH,kBAEpE,wBAAQS,KAAK,SAAb,2B,GApEcE,IAAMC,WAsFfC,GATY,WACzB,OACA,gCACE,cAAC,EAAD,8BACA,cAAC,GAAD,QChMWC,GAzCkB,WAC/B,OACE,gCACE,cAAC,EAAD,gCAEE,wDAHJ,8CAI+C,mBAAGtF,KAAK,wCAAR,8BAJ/C,IAKI,+BACI,iFAAmD,mBAAGA,KAAK,wCAAR,qCAAnD,kCACA,4FACA,gHAEJ,gEAVJ,sDAWuD,mBAAGA,KAAK,wCAAR,oBAXvD,mBAWiI,mBAAGA,KAAK,wCAAR,kBAXjI,KAYI,+BACI,yGACA,iEACA,oFACA,8FAEJ,mDAlBJ,sDAoBI,+BACI,gGACA,yDAA2B,mBAAGA,KAAK,wCAAR,oCAA3B,sBACA,iFACA,uKACA,iMAC0C,mBAAGA,KAAK,sHAAR,wBAD1C,OAGA,+BACI,sDADJ,iBACmD,mBAAGA,KAAK,mDAAR,gCADnD,4BAC6J,mBAAGA,KAAK,sHAAR,uBAD7J,2EAKJ,2KC1BOuF,GATmB,WAChC,OACE,gCACE,cAAC,EAAD,iCACA,0CCiBS9G,I,MArBI,SAAC,GAAe,IAAbC,EAAY,EAAZA,MAClB,OACI,wBAAO5B,UAAU,mBAAjB,UACI,gCACI,+BACI,oBAAI6B,MAAM,MAAV,kBACA,2CAGR,gCACKD,EAAMN,KAAI,gBAAGQ,EAAH,EAAGA,KAAMC,EAAT,EAASA,KAAT,OACP,+BACI,6BAAKD,IACL,6BAAKC,iBCFduH,GATgB,WAC3B,OACI,qCACI,cAAC,EAAD,8BACA,cAAC,GAAD,CAAY1H,MAAOQ,QCYhBE,I,MAjBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQuC,EAAjB,EAAiBA,SAAUtC,EAA3B,EAA2BA,YAA3B,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EAAO,uBACR,mBAAGU,KAAM8B,GAAyBD,EAAWE,OAAO,SAASC,IAAI,sBAAjE,0BACA,cAAC,IAAD,CAAcxC,MAAO,EAArB,SAAyBD,gBC0BtB0C,GAvCU,CACvB,CACE5C,MAAO,0EACPC,OACE,+BACFuC,SAAU,sCAEZ,CACExC,MAAO,yDACPC,OACE,wGACFuC,SAAU,wCAEZ,CACExC,MAAO,uDACPC,OACE,4BACFuC,SAAU,oCAEZ,CACExC,MAAO,wDACPC,OACE,6CACFuC,SAAU,8CAEZ,CACExC,MAAO,4DACPC,OACE,mEACFuC,SAAU,qCAEZ,CACExC,MAAO,wFACPC,OACE,kHACFuC,SAAU,uCChBCwE,GAdkB,WAC7B,OAKI,cAJA,CAIA,iBACI,cAAC,EAAD,gCACA,cAAC,GAAD,CAAY3H,MAAOuD,SCOhB7C,I,MAhBI,SAAC,GAAe,IAAbV,EAAY,EAAZA,MACpB,OACE,qBAAK5B,UAAU,mBAAf,SACG4B,EAAMN,KAAI,gBAAEiB,EAAF,EAAEA,MAAOC,EAAT,EAASA,OAAQC,EAAjB,EAAiBA,YAAjB,OACT,6BACE,+BACE,oBAAIzC,UAAU,oBAAd,SAAmCuC,IAClCC,EACD,cAAC,IAAD,CAAcE,MAAO,EAArB,SAAyBD,gBC+BtBE,GA3CQ,CACrB,CACEJ,MAAO,iDACPC,OACE,+BAEJ,CACED,MAAO,+BACPC,OACE,6DAEJ,CACED,MAAO,+DACPC,OACE,wFAEJ,CACED,MAAO,kFACPC,OACE,6CAEJ,CACED,MAAO,6FACPC,OACE,6GAEJ,CACED,MAAO,6CACPC,OACE,kEAEJ,CACED,MAAO,wEACPC,OACE,0CAEJ,CACED,MAAO,gEACPC,OACE,kCCwFSG,GA/HQ,CACrB,CACEJ,MAAO,8DACPC,OAAQ,+BAER,CACAD,MAAO,8FACPC,OAAQ,yGAER,CACAD,MAAO,iCACPC,OAAQ,yDAER,CACAD,MAAO,sFACPC,OAAQ,mEAER,CACAD,MAAO,4EACPC,OAAQ,gBAER,CACAD,MAAO,+HACPC,OAAQ,mEAER,CACAD,MAAO,sDACPC,OAAQ,2CAER,CACAD,MAAO,gDACPC,OAAQ,yFAER,CACAD,MAAO,+EACPC,OAAQ,yFAER,CACAD,MAAO,2FACPC,OAAQ,yEAER,CACAD,MAAO,wDACPC,OAAQ,iEAER,CACAD,MAAO,sFACPC,OAAQ,8FAER,CACAD,MAAO
,4CACPC,OAAQ,iBAER,CACAD,MAAO,sDACPC,OAAQ,4BAER,CACAD,MAAO,+DACPC,OAAQ,2DAER,CACAD,MAAO,yEACPC,OAAQ,qEAER,CACAD,MAAO,4FACPC,OAAQ,uHAER,CACAD,MAAO,6CACPC,OAAQ,8DAER,CACAD,MAAO,mDACPC,OAAQ,wEAER,CACAD,MAAO,sEACPC,OAAQ,0FAER,CACAD,MAAO,qGACPC,OAAQ,yCAER,CACAD,MAAO,mGACPC,OAAQ,4EAER,CACAD,MAAO,uDACPC,OAAQ,wDAER,CACAD,MAAO,iFACPC,OAAQ,qFAER,CACAD,MAAO,6DACPC,OAAQ,0CAER,CACAD,MAAO,iFACPC,OAAQ,4EAER,CACAD,MAAO,oDACPC,OAAQ,qFAER,CACAD,MAAO,+CACPC,OAAQ,qLAER,CACAD,MAAO,+EACPC,OAAQ,2DAER,CACAD,MAAO,yEACPC,OAAQ,+DAER,CACAD,MAAO,kEACPC,OAAQ,2BCnHGG,GARQ,CACrB,CACEJ,MAAO,+BACPC,OACE,8DCoBSgH,GAlBgB,WAC3B,OAKI,cAJA,CAIA,iBACI,cAAC,EAAD,8BACA,6BAAI,mDACJ,cAAC,GAAD,CAAY5H,MAAO0D,KACnB,6BAAI,uCACJ,cAAC,GAAD,CAAY1D,MAAOiB,KACnB,6BAAI,yCACJ,cAAC,GAAD,CAAYjB,MAAOkB,SCOhBC,I,MAvBC,SAAC,GAAe,IAAbnB,EAAY,EAAZA,MACjB,OACE,uBAAO5B,UAAU,gBAAjB,SACG4B,EAAMN,KAAI,gBAAG0B,EAAH,EAAGA,SAAUC,EAAb,EAAaA,QAAb,OACT,6BACE,+BACE,oBAAIjD,UAAU,oBAAd,SAAmCgD,IAClCC,EAAQ3B,KAAI,gBAAGQ,EAAH,EAAGA,KAAMoB,EAAT,EAASA,KAAMC,EAAf,EAAeA,QAAf,OACX,sBAAKnD,UAAU,kBAAf,UACE,mBAAGA,UAAU,wBAAwBkD,KAAMA,EAA3C,SACGpB,IAEH,uBACA,sBAAM9B,UAAU,mBAAhB,SAAoCmD,sBCDrCsG,GATqB,WAChC,OACI,gCACI,cAAC,EAAD,mCACA,cAAC,GAAD,CAAS7H,MAAO2B,QCmEbmG,GA1EqB,WAChC,OACI,gCACI,cAAC,EAAD,oCACA,oPACA,4CACA,+BACI,6BAAI,yDACJ,+BACI,yMACA,6JACA,sEAAwC,mBAAGxG,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,UAEJ,6BAAI,oFACJ,+BACI,sKACA,oPAEJ,6BAAI,gFACJ,+BACI,qPACA,4OAEJ,6BAAI,uFACJ,+BACI,sRACA,qPACA,ihBACA,sLAGR,6CACA,+BACI,+BAAI,wCAAJ,6JACA,+BAAI,4CAAJ,8FACA,+BACI,iRACA,kaACA,oTACA,wVAEJ,+BAAI,8CAAJ,iJACA,+BACI,8EACA,8EACA,6EACA,uDACA,0EACA,0EACA,uEACA,oDACA,qDACA,wDAEJ,+BAAI,iDAAJ,2GACA,+BACI,gLACA,gQACA,qQACA,8QACA,iNAGR,2CACA,8BAAG,iDAAH,8eACA,8BAAG,wDAAH,q0BACA,8BAAG,kDAAH,ouBACA,8BAAG,uEAAH,gkBAEA,oCAAM,mFAAqD,mBAAGA,KAAK,gBAAR,6BAArD,OAAN,WCrDGyG,GAde,WAC5B,OACE,gCACE,cAAC,EAAD,6BACA,gCACA,+BACA,6BAAI,cAAC,GAAD,CAAS/H,MAAOA,MACpB,6BAAI,cAAC,GAAD,CAASA,MAAO8B,eCGXkG,I,MAXU,WACrB,OACI,gCACI,cAAC,EAAD,uBACA,iCACI,6CCsBDC,GA5BW,WACtB,OACI,gCACI,cAAC,EAAD,0BADJ,wCAEyC,uBAFzC,oDAGqD,uBAHrD,gDAIiD,uBAJjD,8CAKqC,uBALrC,oEAMqE,uBANrE,+CAOgD,uBAPhD,yCAQ0C,uBAR1C,qCASsC,uBATtC,0CAU2C,uBAV3C,kDAWmD,uBAXnD,kDAYmD,uBAZnD,6CAa8C,uBAb9C,0CAc2C,uBAd3C,iCAekC,uBAflC,4CAgB6C,uBAhB7C,4CAiB6C,uBAjB7C,oCAkBqC,uBAlBrC,4CAmB6C,uBAnB7C,6CAoB8C,uBApB9C,uCAqBwC,2BCbjCC,GAVkB,WAC7B,OACI,gCACI,cAAC,EAAD,gCADJ,UC+COC,I,MA9CO,WAClB,OACI,gCACI,cAAC,EAAD,yDACA,sBAAK7F,MAAM,2BAAX,UACI,krBAGA,8uBASA,4BAAG,0GAbP,4DAeI,+BACI,gFACA,0EACA,oFACA,6EACA,+EACA,uGACA,kFACA,qFACA,wDACA,uHACA,yFACA,gEACA,kFAEJ,uLAGJ,uBACA,sBAAKA,MAAM,2BAAX,UACI,iDACA,cAAC,GAAD,CAAYtC,MAAOQ,YC6IpB4H,GAxLY,WACvB,OACI,gCACI,cAAC,EAAD,0BACA,6FAEA,6BAAI,sBAAM3F,MAAM,WAAZ,wCACJ,kCACI,6BACI,4DAEJ,6BACI,uDAEJ,6BACI,yDAA2B,mBAAGnB,KAAK,qCAAR,6BAK/B,6BACI,4DAEJ,+BACI,4DACA,+BACI,uFACA,sIACA,mHAGR,6BACI,2DAEJ,6BACI,gEAEJ,+BACI,yFACA,mGACA,8HACA,oHACA,iLACA,yKACA,+FACA,6IACA,yHACA,uMACA,8HACA,yIACA,oJACA,gHACA,oIACA,uIACA,uGACA,iFACA,iGACA,6FAEJ,6BACI,4DAEJ,+BACI,6DACA,+BACI,kFACA,sHACA,+GAKZ,uBACA,6BAAI,sBAAMmB,MAAM,WAAZ,uCACJ,kCACI,6BACI,yDAA2B,mBAAGnB,KAAK,gEAAR,+BAK/B,6BACI,4DAEJ,6BACI,iEAEJ,+BACI,oFACA,6FACA,kGACA,oGACA,kJACA,oFACA,oGACA,yGACA,yFACA,yHACA,yHACA,2GACA,uIACA,wGACA,yGACA,kHACA,sFACA,mMAEJ,6BACI,2DAEJ,+BACI,8DACA,+BACI,mFACA,kHACA,qIAGR,6BACI,4DAEJ,6BACI,oEAEJ,6BACI,0EAEJ,6BACI,gDAAkB,mBAAGA,KAAK,iDAAR,8CAEtB,6BACI,8DAEJ,6BACI,gEAIR,uBACA,6BAAI,sBAAMmB,MAAM,WAAZ,qCACJ,kCACI,6BACI,yDAA2B,mBAAGnB,KAAK,gFAAR,mCAK/B,6BACI,4DAEJ,6BACI,kEAEJ,+BACI,qFACA,wIACA,iGACA,mKACA,yIACA,yHACA,4GACA,mKACA,6IACA,qHACA,wIACA,2HACA,0KAC
A,kMACA,yGACA,iIACA,kIACA,6KACA,uJACA,kFAEJ,6BACI,0DAGR,2BC1JG6C,GAzBgB,WAC3B,OACI,gCACI,cAAC,EAAD,+BACA,kCAEI,oCAAM,mBAAG7C,KAAK,yCAAR,4BAAN,8BACA,+BAAI,uCAAJ,mDACA,6BAAI,8BAAG,0CAAH,wtBAGJ,oCAAM,mBAAGA,KAAK,8CAAR,2BAAN,4EACA,+BAAI,uCAAJ,0DACA,6BAAI,8BAAG,0CAAH,koBAGJ,oCAAM,mBAAGA,KAAK,gDAAR,4BAAN,0DACA,+BAAI,uCAAJ,+CACA,6BAAI,8BAAG,0CAAH,goBCkEL+G,GAnFe,WAC1B,OACI,gCACI,cAAC,EAAD,8BAEA,0ZAEA,2CALJ,oCAOI,+BACI,8DAAgC,oDAAhC,mDACA,iEACA,yDAA2B,qDAA3B,iCACA,gEACA,mFACA,oFAEJ,2CAAa,mBAAG/G,KAAK,kEAAR,6EAAb,OAEA,yCACA,muBAEA,86BAKA,2FACA,+BACI,qEACA,8EACA,2EACA,qFACA,iEACA,qHACA,wGACA,gFACA,0GACA,wEACA,qJACA,sFACA,6FAGJ,qDACA,0LAEA,uEACA,0rBAEA,mEAAqC,mBAAGA,KAAK,2EAA2EsB,UAAQ,EAA3F,wCAArC,0HAGA,4DACA,qjCAEA,4JAEA,6VAEA,kDACA,k6CAEA,0VAEA,iDACA,iJAEA,0RAEA,kYAEA,8DAAgC,uBAAhC,+BAGA,4DAA8B,uBAA9B,mCC9BG0F,GA9Ce,WAC5B,OACE,gCACE,cAAC,EAAD,8BACF,+VAAiU,mBAAGhH,KAAK,oCAAR,wCAAjU,uDACA,4CACE,+BACA,6BAAI,yDACF,+BACE,yMACA,sEAAwC,mBAAGA,KAAK,gBAAR,8BAAxC,0BAA2G,mBAAGA,KAAK,sBAAR,oCAA3G,OACA,+OAED,6BAAI,2DACL,6BACE,0LAEF,6BAAI,uEACJ,+BACE,wJACA,qMACA,4KAEF,6BAAI,uFACJ,+BACE,gPACA,kJAEF,6BAAI,gFACJ,+BACE,0MACA,qGACA,uPAEF,6BAAI,2EACJ,+BACE,mcACA,2IAIJ,oCAAM,8EAAgD,mBAAGA,KAAK,gBAAR,6BAAhD,QAAgG,mBAAGA,KAAK,uBAAR,mCAAhG,OAAN,WCrBSiH,GApBa,WACxB,OACI,gCACI,cAAC,EAAD,8BACA,yCACA,ySACA,qCACA,0lBACA,wCACA,m0CACA,wCACA,uoBACA,qDACA,0IAA4G,mBAAGjH,KAAK,oCAAR,wCAA5G,sdACA,0HCGGiH,GAjBa,WACxB,OACI,gCACI,cAAC,EAAD,oCACA,8HACA,yVACA,+BACI,6BAAI,2FADR,4VAGI,6BAAI,kGAHR,mWAKgH,mBAAGjH,KAAK,oCAAR,wCALhH,WCKGkH,GAVa,WACxB,OACI,gCACI,cAAC,EAAD,wCACA,yCC8DGC,I,MAlEc,WACzB,OACI,gCACI,cAAC,EAAD,qCACA,uCACA,gEAAkC,mBAAGnH,KAAK,sBAAR,SAA8B,8DAAhE,KAAwG,yCAAxG,KAAyH,4CAAzH,QAAgJ,8CAAhJ,OAEA,8CACA,iEAAmC,mBAAGA,KAAK,sDAAR,SAA8D,oDAAjG,qNACA,sDACA,yFACA,+BACI,+BACI,mBAAGA,KAAK,kCAAR,SAA0C,kDAD9C,4BAEI,qEAAuC,mBAAGA,KAAK,8BAAR,kCAAvC,uCACA,+BACI,uDACA,mEACA,mDACA,iFAEJ,0DACA,uDACA,4DACA,kEAEJ,+BACI,mBAAGA,KAAK,0CAAR,SAAkD,0DADtD,2CAEI,yEAA2C,mBAAGA,KAAK,2EAAR,yFAC3C,2FAA6D,mBAAGA,KAAK,gCAAR,oCAA7D,yCACA,kEACA,2EACA,sEACA,kEAEJ,+BACI,mBAAGA,KAAK,kCAAR,SAA0C,6DAD9C,4CAEI,qEAAuC,mBAAGA,KAAK,qCAAR,yCAAvC,uCACA,+BACI,kDACA,4CACA,sEACA,oEACA,mEACA,sDAEJ,0DACA,uDACA,oDACA,gEACA,+DACA,+EAEJ,+BACI,mBAAGA,KAAK,qHAAR,SAA6H,2DADjI,2DAEI,oDAAsB,mBAAGA,KAAK,2HAAR,SAAmI,gDAAzJ,yMACA,mEACA,mEACA,oDACA,0ECxCLoH,GApByB,WACpC,OACI,gCACI,cAAC,EAAD,wCACA,yDACA,+BACI,6BAAI,oFACJ,6BAAI,2GAER,2DACA,6BACI,+BAAI,4HAAJ,SAEJ,sDACA,8BAAG,0IAAH,OACA,8BAAG,sDAAwB,mBAAGpH,KAAI,2BAAP,+BAAxB,4FAAH,WCfNqD,GAAW,SAAC,GAAD,IAAGC,EAAH,EAAGA,MAAOC,EAAV,EAAUA,KAAMC,EAAhB,EAAgBA,KAAMC,EAAtB,EAAsBA,UAAWC,EAAjC,EAAiCA,SAAUC,EAA3C,EAA2CA,gBAA3C,OACb,gCACI,6BAAKL,IACL,oBAAGM,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,mBAA0DP,KAC1D,mBAAGK,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,gCACA,mBAAGF,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,SAAmD,mBAAG9D,KAAMwD,EAAT,SAAgBA,MACnE,oBAAGI,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,yBAAgEL,KAChE,oBAAGG,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,uBAA8DJ,KAC9D,mBAAGE,MAAO,CAAEC,UAAW,EAAGC,aAAc,SAAxC,SAAmD,mBAAG9D,KAAM2D,EAAT,0CAKrDI,GAAS,CACX,CACI1E,MAAO,0EACP2E,aAAc,gEAElB,CACI3E,MAAO,0GACP2E,aAAc,uEAElB,CACI3E,MAAO,6CACP2E,aAAc,8DAElB,CACI3E,MAAO,kGACP2E,aAAc,sEAElB,CACI3E,MAAO,2IACP2E,aAAc,4EAElB,CACI3E,MAAO,kGACP2E,aAAc,mIAElB,CACI3E,MAAO,kEACP2E,aAAc,yDAElB,CACI3E,MAAO,yDACP2E,aAAc,wDAElB,CACI3E,MAAO,wGACP2E,aAAc,uEAElB,CACI3E,MAAO,qEACP2E,aAAc,qGAElB,CACI3E,MAAO,4FACP2E,aAAc,mIAElB,CACI3E,MAAO,sFACP2E,aAAc,0DAElB,CACI3E,MAAO,+EACP2E,aAAc,gEAElB,CACI3E,MAAO,8DACP2E,aAAc,gEAElB,CACI3E,MAAO,4CACP2E,aAAc,mFAElB,CACI3E,MAAO,6EACP2E,aAAc,gEAElB,CACI3E,MAAO,6EACP2E,aAAc,0EAElB,CACI3E
,MAAO,uFACP2E,aAAc,gFAElB,CACI3E,MAAO,0GACP2E,aAAc,qEAElB,CACI3E,MAAO,2DACP2E,aAAc,wEAElB,CACI3E,MAAO,iEACP2E,aAAc,qEAElB,CACI3E,MAAO,oFACP2E,aAAc,2HAElB,CACI3E,MAAO,mHACP2E,aAAc,8EAElB,CACI3E,MAAO,sFACP2E,aAAc,oEAElB,CACI3E,MAAO,qEACP2E,aAAc,8EAElB,CACI3E,MAAO,gHACP2E,aAAc,qEAElB,CACI3E,MAAO,8FACP2E,aAAc,qDAElB,CACI3E,MAAO,0EACP2E,aAAc,wDAElB,CACI3E,MAAO,8FACP2E,aAAc,sDAElB,CACI3E,MAAO,iEACP2E,aAAc,qEAElB,CACI3E,MAAO,4DACP2E,aAAc,sEAElB,CACI3E,MAAO,+FACP2E,aAAc,4EAElB,CACI3E,MAAO,0GACP2E,aAAc,uDAElB,CACI3E,MAAO,2DACP2E,aAAc,sFAIhBC,G,oDACF,WAAYC,GAAQ,IAAD,+BACf,cAAMA,IACDC,MAAQ,CACTC,iBAAiB,EACjBC,SAAU,IAEd,EAAKC,aAAe,EAAKA,aAAaC,KAAlB,iBACpB,EAAKC,aAAe,EAAKA,aAAaD,KAAlB,iBAPL,E,iDAUnB,SAAaE,GACTA,EAAMC,iBAEkB,oBACpBC,KAAKR,MAAME,SACXM,KAAKC,SAAS,CAAER,iBAAiB,IAEjCS,MAAM,wB,0BAId,SAAaJ,GACTE,KAAKC,SAAS,CAAEP,SAAUI,EAAM1C,OAAO+C,U,oBAG3C,WACI,OAAIH,KAAKR,MAAMC,gBAEP,gCACI,yGAA2E,mBAAGpE,KAAK,wCAAR,sBAA3E,sDACA,cAAC,GAAD,CACIsD,MAAM,uBACNC,KAAK,mEACLC,KAAK,6EACLC,UAAU,gBACVC,SAAS,SACTC,gBAAgB,wCAEpB,+LACA,4NACA,oEACA,kCACI,gCACI,+BACI,6CACA,oDAGR,gCACKI,GAAO3F,KAAI,SAAC2G,GAAD,OACR,+BACI,6BAAKA,EAAM1F,QACX,6BAAI,mBAAGW,KAAM+E,EAAMf,aAAcjC,OAAO,SAASC,IAAI,sBAAjD,0BAFC+C,EAAM1F,kBAW/B,uBAAM2F,SAAUL,KAAKL,aAArB,UACI,iFACA,8CAEI,uBAAOW,KAAK,WAAWH,MAAOH,KAAKR,MAAME,SAAUa,SAAUP,KAAKH,kBAEtE,wBAAQS,KAAK,SAAb,2B,GApEIE,IAAMC,WAsFfC,GATY,WACvB,OACI,gCACI,cAAC,EAAD,8BACA,cAAC,GAAD,QChMGgC,GAzCkB,WAC7B,OACI,gCACI,cAAC,EAAD,gCAEA,wDAHJ,8CAI+C,mBAAGrH,KAAK,wCAAR,8BAJ/C,IAKI,+BACI,iFAAmD,mBAAGA,KAAK,wCAAR,qCAAnD,kCACA,4FACA,gHAEJ,gEAVJ,sDAWuD,mBAAGA,KAAK,wCAAR,oBAXvD,mBAWiI,mBAAGA,KAAK,wCAAR,kBAXjI,KAYI,+BACI,yGACA,iEACA,oFACA,8FAEJ,mDAlBJ,sDAoBI,+BACI,gGACA,yDAA2B,mBAAGA,KAAK,wCAAR,oCAA3B,sBACA,iFACA,uKACA,iMAC8C,mBAAGA,KAAK,sHAAR,wBAD9C,OAGA,+BACI,sDADJ,iBACmD,mBAAGA,KAAK,mDAAR,gCADnD,4BAC6J,mBAAGA,KAAK,sHAAR,uBAD7J,2EAKJ,2KC1BGsH,GATmB,WAC9B,OACI,gCACI,cAAC,EAAD,iCACA,0CC8LGC,I,MArGC,WACd,OACE,qBAAKzK,UAAU,WAAf,SACE,qBAAKA,UAAU,UAAf,SACE,eAAC,IAAD,WACE,cAAC,IAAD,CAAO0K,OAAK,EAACC,KAAK,uBAAuBC,UAAWvI,IACpD,cAAC,IAAD,CAAOqI,OAAK,EAACC,KAAK,uBAAuBC,UAAWhI,IACpD,cAAC,IAAD,CAAO8H,OAAK,EAACC,KAAK,4BAA4BC,UAAWpH,IACzD,cAAC,IAAD,CAAOkH,OAAK,EAACC,KAAK,6BAA6BC,UAAWnH,IAC1D,cAAC,IAAD,CAAOiH,OAAK,EAACC,KAAK,uBAAuBC,UAAWnG,IACpD,cAAC,IAAD,CAAOiG,OAAK,EAACC,KAAK,sBAAsBC,UAAWjH,IACnD,cAAC,IAAD,CAAO+G,OAAK,EAACC,KAAK,iBAAiBC,UAAWhH,IAC9C,cAAC,IAAD,CAAO8G,OAAK,EAACC,KAAK,mBAAmBC,UAAW7G,IAChD,cAAC,IAAD,CAAO2G,OAAK,EAACC,KAAK,yBAAyBC,UAAW5G,IACtD,cAAC,IAAD,CAAO0G,OAAK,EAACC,KAAK,mBAAmBC,UAAWxG,IAChD,cAAC,IAAD,CAAOsG,OAAK,EAACC,KAAK,wBAAwBC,UAAWtG,IACrD,cAAC,IAAD,CAAOoG,OAAK,EAACC,KAAK,sBAAsBC,UAAWrG,IACnD,cAAC,IAAD,CAAOmG,OAAK,EAACC,KAAK,sBAAsBC,UAAWC,IACnD,cAAC,IAAD,CAAOH,OAAK,EAACC,KAAK,4BAA4BC,UAAWE,KACzD,cAAC,IAAD,CAAOJ,OAAK,EAACC,KAAK,oBAAoBC,UAAWjG,KACjD,cAAC,IAAD,CAAO+F,OAAK,EAACC,KAAK,6BAA6BC,UAAWG,KAC1D,cAAC,IAAD,CAAOL,OAAK,EAACC,KAAK,QAAQC,UAAWI,IAErC,cAAC,IAAD,CAAON,OAAK,EAACC,KAAK,uBAAuBC,UAAW9F,KACpD,cAAC,IAAD,CAAO4F,OAAK,EAACC,KAAK,yBAAyBC,UAAWxF,KACtD,cAAC,IAAD,CAAOsF,OAAK,EAACC,KAAK,uBAAuBC,UAAWvF,KACpD,cAAC,IAAD,CAAOqF,OAAK,EAACC,KAAK,4BAA4BC,UAAWrF,KACzD,cAAC,IAAD,CAAOmF,OAAK,EAACC,KAAK,6BAA6BC,UAAWpF,KAC1D,cAAC,IAAD,CAAOkF,OAAK,EAACC,KAAK,uBAAuBC,UAAW1E,KACpD,cAAC,IAAD,CAAOwE,OAAK,EAACC,KAAK,sBAAsBC,UAAWnF,KACnD,cAAC,IAAD,CAAOiF,OAAK,EAACC,KAAK,iBAAiBC,UAAWlF,KAC9C,cAAC,IAAD,CAAOgF,OAAK,EAACC,KAAK,mBAAmBC,UAAWjF,KAChD,cAAC,IAAD,CAAO+E,OAAK,EAACC,KAAK,yBAAyBC,UAAWhF,KACtD,cAAC,IAAD,CAAO8E,OAAK,EAACC,KAAK,mBAAmBC,UAAW9E,KAChD,cAAC,IAAD,CAAO4E,OAAK,EAACC,KAAK,wBAAwBC,UAAW7E,KACrD,cAAC,IAAD,CAAO2E,OAAK,EA
ACC,KAAK,sBAAsBC,UAAW5E,KACnD,cAAC,IAAD,CAAO0E,OAAK,EAACC,KAAK,wBAAwBC,UAAW3E,KACrD,cAAC,IAAD,CAAOyE,OAAK,EAACC,KAAK,sBAAsBC,UAAWK,KACnD,cAAC,IAAD,CAAOP,OAAK,EAACC,KAAK,4BAA4BC,UAAWM,KACzD,cAAC,IAAD,CAAOR,OAAK,EAACC,KAAK,oBAAoBC,UAAWxE,KACjD,cAAC,IAAD,CAAOsE,OAAK,EAACC,KAAK,6BAA6BC,UAAWO,KAC1D,cAAC,IAAD,CAAOT,OAAK,EAACC,KAAK,iCAAiCC,UAAWtE,KAC9D,cAAC,IAAD,CAAOoE,OAAK,EAACC,KAAK,uBAAuBC,UAAWQ,KACpD,cAAC,IAAD,CAAOV,OAAK,EAACC,KAAK,yBAAyBC,UAAWpC,KACtD,cAAC,IAAD,CAAOkC,OAAK,EAACC,KAAK,0BAA0BC,UAAWnC,KACvD,cAAC,IAAD,CAAOiC,OAAK,EAACC,KAAK,QAAQC,UAAWS,KAErC,cAAC,IAAD,CAAOX,OAAK,EAACC,KAAK,uBAAuBC,UAAWlC,KACpD,cAAC,IAAD,CAAOgC,OAAK,EAACC,KAAK,yBAAyBC,UAAWU,KACtD,cAAC,IAAD,CAAOZ,OAAK,EAACC,KAAK,uBAAuBC,UAAWW,KACpD,cAAC,IAAD,CAAOb,OAAK,EAACC,KAAK,4BAA4BC,UAAWjC,KACzD,cAAC,IAAD,CAAO+B,OAAK,EAACC,KAAK,6BAA6BC,UAAWhC,KAC1D,cAAC,IAAD,CAAO8B,OAAK,EAACC,KAAK,uBAAuBC,UAAWzB,KACpD,cAAC,IAAD,CAAOuB,OAAK,EAACC,KAAK,sBAAsBC,UAAWY,KACnD,cAAC,IAAD,CAAOd,OAAK,EAACC,KAAK,iBAAiBC,UAAW/B,KAC9C,cAAC,IAAD,CAAO6B,OAAK,EAACC,KAAK,mBAAmBC,UAAW9B,KAChD,cAAC,IAAD,CAAO4B,OAAK,EAACC,KAAK,yBAAyBC,UAAW7B,KACtD,cAAC,IAAD,CAAO2B,OAAK,EAACC,KAAK,mBAAmBC,UAAW3B,KAChD,cAAC,IAAD,CAAOyB,OAAK,EAACC,KAAK,wBAAwBC,UAAWa,KACrD,cAAC,IAAD,CAAOf,OAAK,EAACC,KAAK,sBAAsBC,UAAW1B,KACnD,cAAC,IAAD,CAAOwB,OAAK,EAACC,KAAK,sBAAsBC,UAAWc,KACnD,cAAC,IAAD,CAAOhB,OAAK,EAACC,KAAK,4BAA4BC,UAAWe,KACzD,cAAC,IAAD,CAAOjB,OAAK,EAACC,KAAK,oBAAoBC,UAAWgB,KACjD,cAAC,IAAD,CAAOlB,OAAK,EAACC,KAAK,6BAA6BC,UAAWiB,KAC1D,cAAC,IAAD,CAAOnB,OAAK,EAACC,KAAK,iCAAiCC,UAAWkB,KAC9D,cAAC,IAAD,CAAOpB,OAAK,EAACC,KAAK,uBAAuBC,UAAWmB,KACpD,cAAC,IAAD,CAAOrB,OAAK,EAACC,KAAK,yBAAyBC,UAAWoB,KACtD,cAAC,IAAD,CAAOtB,OAAK,EAACC,KAAK,0BAA0BC,UAAWqB,KACvD,cAAC,IAAD,CAAOvB,OAAK,EAACC,KAAK,QAAQC,UAAWsB,KAErC,cAAC,IAAD,CAAOxB,OAAK,EAACC,KAAK,uBAAuBC,UAAWtB,KACpD,cAAC,IAAD,CAAOoB,OAAK,EAACC,KAAK,yBAAyBC,UAAWrB,KACtD,cAAC,IAAD,CAAOmB,OAAK,EAACC,KAAK,uBAAuBC,UAAWpB,KACpD,cAAC,IAAD,CAAOkB,OAAK,EAACC,KAAK,4BAA4BC,UAAWnB,KACzD,cAAC,IAAD,CAAOiB,OAAK,EAACC,KAAK,6BAA6BC,UAAWlB,KAC1D,cAAC,IAAD,CAAOgB,OAAK,EAACC,KAAK,uBAAuBC,UAAWV,KACpD,cAAC,IAAD,CAAOQ,OAAK,EAACC,KAAK,sBAAsBC,UAAWjB,KACnD,cAAC,IAAD,CAAOe,OAAK,EAACC,KAAK,iBAAiBC,UAAWhB,KAC9C,cAAC,IAAD,CAAOc,OAAK,EAACC,KAAK,mBAAmBC,UAAWf,KAChD,cAAC,IAAD,CAAOa,OAAK,EAACC,KAAK,yBAAyBC,UAAWd,KACtD,cAAC,IAAD,CAAOY,OAAK,EAACC,KAAK,mBAAmBC,UAAWZ,KAChD,cAAC,IAAD,CAAOU,OAAK,EAACC,KAAK,wBAAwBC,UAAWuB,KACrD,cAAC,IAAD,CAAOzB,OAAK,EAACC,KAAK,sBAAsBC,UAAWX,KACnD,cAAC,IAAD,CAAOS,OAAK,EAACC,KAAK,sBAAsBC,UAAWwB,KACnD,cAAC,IAAD,CAAO1B,OAAK,EAACC,KAAK,4BAA4BC,UAAWyB,KACzD,cAAC,IAAD,CAAO3B,OAAK,EAACC,KAAK,oBAAoBC,UAAWR,KACjD,cAAC,IAAD,CAAOM,OAAK,EAACC,KAAK,6BAA6BC,UAAW0B,KAC1D,cAAC,IAAD,CAAO5B,OAAK,EAACC,KAAK,iCAAiCC,UAAWN,KAC9D,cAAC,IAAD,CAAOI,OAAK,EAACC,KAAK,uBAAuBC,UAAW2B,KACpD,cAAC,IAAD,CAAO7B,OAAK,EAACC,KAAK,yBAAyBC,UAAWL,KACtD,cAAC,IAAD,CAAOG,OAAK,EAACC,KAAK,0BAA0BC,UAAWJ,KACvD,cAAC,IAAD,CAAOE,OAAK,EAACC,KAAK,QAAQC,UAAW4B,KAGrC,cAAC,IAAD,CAAU3L,GAAG,mB,MC3KR4L,OAdf,WACE,OACE,sBAAKzM,UAAU,MAAf,UACE,cAAC,EAAD,IACA,qBAAKA,UAAU,YAAf,SACE,sBAAKA,UAAU,MAAf,UACE,cAAC,EAAD,IACA,cAAC,GAAD,aCFV0M,IAASC,OACP,cAAC,IAAMC,WAAP,UACE,cAAC,IAAD,UACE,cAAC,GAAD,QAGJC,SAASC,eAAe,Y","file":"static/js/main.7955eddd.chunk.js","sourcesContent":["export default __webpack_public_path__ + \"static/media/AMAZON.287d5b91.png\";","export default __webpack_public_path__ + \"static/media/googlelogo_color_272x92dp.85707a33.png\";","export default __webpack_public_path__ + \"static/media/MIT-IBM-logo.66d5df33.png\";","export default __webpack_public_path__ + 
\"static/media/MBUZAI.5a8fcf32.png\";","export default __webpack_public_path__ + \"static/media/Qualcomm.32b66549.jpg\";","export default __webpack_public_path__ + \"static/media/Schmidt1.ee055b56.png\";","export default \"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAVMAAACVCAMAAADSU+lbAAAApVBMVEX////+/v53d3fyUCICpO+AuwH/uQJzc3Nubm5tbW10dHTySxcAou//twD4+Ph5eXnl5eXt7e3zhGlhu++IiIilzVnyRAL3wLT1+Oh4uADV5rb9ymCn2fX8+en957T9y1m5ubna2trIyMiioqKrq6vx8fGAgIC2trbPz8+QkJCampqKiorU1NSenp5lZWWhyl3yfmH30cagyk/f7ce84fT87Mf9yU7gHQ6ZAAAKM0lEQVR4nO2ca7frphGGR25aEG11SZ00vZxG1tXyZfeSpv//pxUEg0DCkk+Wtq21Mu+nswUS6NHADAM+ACQSiUQikUgkEolEIpFIJBKJRCKRSCQSiUQikUgkEolEIpFIJBKJRCKRSCRSQN+u6V//BvjhuzX956d3v8iO9Ps1ff9nyfS3a/rLXyEiacFv1kRMv1bEdHsR0+1FTLcXMd1exHR7EdPtRUy3FzH9xTKLpkABMf1FkjCzU1nesmT+3jtlGvz++xFAXqd8UJzNevrJTJ3EwkIHp3UAkqzZMVWIas4OWvHp1Uwzq/wRIkhyrGLqQFJLA2jnnd2JIDnyA4q/mCkksRVvHiCCy1hnqAJwVV0Wgd7uQgBHdngjU6ftMkwI8rGDqWZaaisQ7U6Z1milgjGXqZ26XsX0ESEoZkzv5kr8yLTfKsgNUhH31T39OJnBJZ2A6e7LmB4CDnKoIw5Tphdk+nAOfqeg090Tx1yBVI5C8TzVdiS+jimrQ4TgxmdMC3OJ7RFpBK22Ap4YA5Wveb6nMXsD08MhEB5Lo5zbaZIOlhA/mIHfK2i0mbJu7F06RFbvYBryUnZycpjKbl9jHj9yam8WZLrHrr9v/fd7IVNxDDDt2JypWqacTjuN+eHEZ5P9+5iGvJTroUam2o1uz2MLIVP+ZqbpIy8FZ+5WSHfK0dVemLa1HuHp1EtBP9hpamYAl2noccvZg1nBSv1oqfThrTifjmMf4C12ajzR1O2Y/rGumDCFaJ6akn/m56Kuqro455EpVJV0RRUilkWZOfWTrBzqd2WWzLkBNDf9uFs+aUv+meuyujw5yRz1L+uj8pG6ZWr0EqZRNUAT1wkmbb8895hC0iodvSSKyq61nLNBnLf1sMaCXlY7Hlv1NkUac84/MoSdVU79tMqm2E69wGJ27JwRJGF3R4Zl8aG/4Qc8D90yk79sFzW58hqmCX5cz0tBpEn34DNtmFBys2iQX2Lm+DPBLwPT41CVFzZVZPwgZH3sRhQHFvfuqgyaPnYf5yyDoam8pmRhq4MQKLlsbLxuNbnyIqZwHBr2vZTxUPw8Y6o/gGUKUDKPkHxSpZkOj5VMe1zPKqbSaLk4TMRkO7bl7OA/b8yawSll01sFr4dhX/JpSVCvYdpgdzwvZTwUhxWmAHU8I+QxLXE5OzAFqGb1h7ICH5izCXLLFMrgrbxSE/fOmCa6q46tSGMZrknbXWNqk2sHIedGroB4TFmXWm6K6VhfzaUjv/hmJka0ajlr6nJkCufYbWocHdJSd8cUKpw7x1LjoaT36JaYjqbDeN+VZVkfY+ExHeJbweOYM8l05CJdU3Euu95OBEJ7tpOpwOpTlp2KiySrmY7JXMbuRVkWd0tVfg8oP9QOFD485ijbPb1D9annT12m2TRUhsRSXmQ6vie/Y/ySXWKPqSrsz1l2qyXTJEVDK02YmfeY6dJ3mXBZfc1BTWd8lDVgLuMKXYQ2L1rpUxMpjPmzxEqHArI5/edT56SjH/65pu9+WmYawdUMU8tUjyQVsy4zxRS1nDfsBAu32mcam+AwaaAz2wRpNtZHMkOkjl2pRh+YJ54Bu03hgB86CuDE/A/j0ycU+UuSx7UWmaKXwhWSeTMu/16aTyHH97z5cVjkMuXW/UCEDNxQzLhD/UlNBpQVbgVtpsJ/2nDR+D8TXM/XUfO1aQDFZvKYGi9l/YT1UNEyU2PDwZQ2MnWmaUwhTMI2s5ITR/UJNVNxmS47c8zguxYCgOmKXNvyOtNX2Wk08VLmz2EVsMAUIpNWD2e0DVNneWCe6+U41GWT++bjNKQa8OuUwSU0GqqOWZ5h+qdV/ReiL39b048/r8ynke+l0EMNA2qJaR4yuynTw7hnGelLboDh8bqB9VHaEzl18Lq/t+h34Rmmf1jV3wG+/O6Pa/rHGlPfS5mPr/uxxPQ8T6vPmDqjGBpn4nRr4heVU6V1RTJiqp08gOngNHcOxq/rVp5h+s2aNNM1PcPUmIoKSgBXQNEaU1MkgvvS8yUvurRZCsxM59pJXW0sz2KbIonMxDnGAxPW/Q6ZGo+svJQxFQNjiSmmXoPeFJmOftqmjKd2DfE4gCF3UyH8iLksB7t368XYr9vAPphaLyXHkAk6TZ5qiWk1T1fPmPKvYKqjfi+HIng3pEgiU6WY3qo7u0+mZk6TQUnj5VOXmN43ZmpGRlJxh2pcLzK9OPPszpjaML/wPNRzdhoKpZaY3vz6FpgZ2MP6dqQ6nCXwsTv39rudT62XkktnHXSaoxzPzKcseM4nwDQPG9vcd6mdg8OYwFKOU/uoaRgWmd6aaWNvTBOM9NjYyRWmpRtwP8G0MYzuE6bmDJG3eQBJgYlA9Xxcmk78oR+e7Y3paHW6E9kTTM0cPFtIPmBqF0mTdRe2LLzLkmo/Ln1tvsqfi/0QeX9MMyerO8bWS0wxc8dDxwJDTBGMF6BC9GBcuwE92vIkQMVlrUgeMDX5gzGP81KmoxV5b72YQ6lCiQ1MJAWY4iLJ36bBNbtet7klYxrCfj/vwIxJ9iDpx0ytb3s1U2f7IU6eYpphUv7ubrTnA8YAU+lpzPR3cerj+GDDVFk72/2G6TD/2u/XOtuoiUni4I5sgKm7zlKdezHTaFwVjiNscT/KHvFlR8zLQ9N9VA+ZnpHf1aaN8ZIZGikvEsy3NWZBqh4Bjfl+Qpzw1gw/kXF6IaamhzJ+U0pezHTM/XgH4ReZ2uOUgvdl1jRZeWFsuhftNGo3QASrzkN93DoZsqeRio34obqp3ZHodnStEPcI1FbM0NT5YgOD5jFTPJwoiqzJivTlTHEUuuf7l/dNne1htY8Xx3y6b+oxHc5c4Fiw9fWfph+tLuJpimVMj9vxezhNaYQYy4WYnuwun7yFHV7NdEwIu35lcd9UFs/33BeYqnecnZg4uD8OamdlmMGWs+fsxMRA0LYQiqXwOLrR65mauc39UckKU2nH8RTSElM5GOZnSQ4std58ytT5KZYMWOeb+IKVM6P0mJ68j/56ppEOPNwYfo2p7HTrm5
6Il5jKhu/+cSk5KqsxtvKJC546sRPIOdW/VcRXtzzAdDKSPplp4Dd8UH/ISx/eob3uw/0dX6Pv+vCWkeU1NidKhJwKj6VOZre65pSpSpHcGZ5AkTcw91wfnHtVpgqFYHFbeGtRtaefxrp4OIwyJq01U93XDy8BAWWqW5M3fK6Psr839UHPLzWmHnh3TZaRWXFvBWPpsSrxwCiYmoFNAInmXPepYKLtu1syWTA0t+7SqrJLd/KWALo4OQ3FLO3rcvJjYtN9/wXU5VL17nCtb58bS0XB/zfg4SXnd9GhrdjQJu3Cpq2/1/sVZU/eunDL5zL9VYqYbi9iur2I6fYiptuLmG4vYrq9iOn2Iqbbi5huL2K6vT71/OmvVE+dk4YvP67pfz+HzqOTSCQSiUQikUgkEolEIpFIJBKJRCKRSCQSiUQikUgkEolEIpFIJBKJRCKRSCTU/wFH24tLVTubaAAAAABJRU5ErkJggg==\"","export default \"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAcwAAABtCAMAAAAbMqFLAAAAulBMVEX///8AAAA3lvbQ0NA4ODgtLS3u7u7Nzc3X19fp6ekPDw99fX2xsbGTk5Ofn5/CwsJEREQrkvb29vZnZ2fe3t6pqakpKSlubm5YWFjk5OTHx8cjIyPy8vK4uLgmkPYVFRVKSkqNjY01NTV0dHRJSUk9PT3l8f5eXl6ampqPj4/B3Pzu9v4bGxs3mPZorPjS5v12tPiHvfmaxvrd7P1Ro/e31/uVxPqmzft0sviv0fvH4fxKn/dXpveDuvlFDEjYAAAOdElEQVR4nO1cZ3vivBKlg+kYTMdgCCWhJNlsAgH2//+ta2OVUXEjweTN1fmyT8xYludoquRNJBT+41h8vpwPf97uPQ2Fb8D7tpFKpRqp/b0novBlPDlUOmgoNv/rWKcoFveejMLXsG8QLhsf956MwtfwQslM/b33ZBS+hgNws9t7T0bha/gHLPN878koeGN9fH96DpB5NwmX5p9YZvX/jcVifdV9H9tUo5H6++QrpFPL3Kps9sbQP3en7fn1Cj3/QzbXePeTeiVkqjrz1licL5Q0Ur6MyPCH+M+GT6fOZrxhI9UwTcXljbE+YcMxj9HufNvS+nHnKbUzU+bTen/Yveyvc+UK4fFBI9op2p0gsUmdvJz0wbZL/5Cq8H0ARaAZlJey+ANKjhR/69v+Y39MrF9swiO7b4Ur8QbMK2JMY8g8vewBnzaJph0jzztll3HiGTASsXP6BNaBneOYNnl/jhd3S+OwaLIKt8MaWuZnlDs/t4DL025rOglrY3t6fVrQnk8j0pAKX8RfYF0R0s3nXcMpZzDeEovjx/mUujAKWfZOcxW+H09U8dvQ5zrWju2ZZ9Q0aJgoLq6P+53tbwGXKfNW81aQ4EijXkAnh2Jvy5pbm8H3sx0pGwcmLj5/7iCZKmbGhzfHIh1uUv/sMiJUCvT016Z9+6Ff/lg/HQXnfIRxWHUKYsP6lGq8rD9fdh9vlyaq+S9I+Qtnr7nx4ueRYVKldqNjw9oOcWdK395mc+fP5h8762mcAhp/H7Rpq6rM2HAwUydI3nFrO1CfKPe0talMBZcbZ8Sm+fr1OSqEg7M5xTZVF3a572lNbzuHytcQUXD9YZecdo2itkliw6dtPzxxa4cw6XmA9atjlruQ2enb/vDyqbaiY8NTQ9qgebWT2n/i5feTec2ep0IsePY6kvNupsy/nC99PjtUqhM8PxSLk2d6crQLz5PtTo/7189LCbJwgqv5orzmT4VdYHqee3w7N1Lb/c60k5jUYZHYOx72HPEcgkJ8ODTYooTF2tmIRJXi6ZLDqrz058LOcvyPPf6D3fJGmHJEIXas356fF5dDAv41BjxGcFZfyf5I7P+mzMb2EHTU1QmpwC7jmZtCNLyQpmlQlQHP9xximZtCNNAGuE/y4wJGTEXmD8SCbk0FblweqGmqr31+IvbwJF6A7BF87qyaBT8Q8EPJwBMA5HtnU326/hPxGoVMlCw1TEnTXeH++IQparD4+267Pb2oLt7PxALEwVC147Xf4CrEAPrFl/qE+b+PAzq1nFLO8xdgfzJNM3VQzdZfgfXb8Vm5WAUFBQUFBQUFBQUFBQUFBQUFBYX/J7SaGK17T0Xhq8gnMfL3norCV5FWZP4eKDJ/ERSZvwiKzF8EReYvgiLzF0GR+YugyPxFUGQK0Ap5G4Wer9CwV+jbQi39u57aSvdrtX665z9gzhbrpwua/NfoZA7t8fL9dCsXUjrf7xcCWoXDy5ukC8OQU7gd9EKp3h476ii257O8xztqxmQ6KDpCm3m3UhD037pAthpy7k/cHc5Txy4Lg0xHHI9KZS9i43Z9WZYMH41MvWW/xuYiPs5OqrJl1GrR+Wrl7kW6OJ7Pal5MaaPm1H2T8bRreKy5mDB6SDLIriRCvc6YlZpUOQn38lRyr+X+VGAu9rmn1mVPTZQ5qWSzz4tEIrM24cabCOPliuinVqI3Y156ashW3NDihuykg+dxI/S6SQFT4Q0NUSiZZaUQJ5JHVEQy9aZkPIGMnEQq2eEMJAKZPdl4Tc6UcllM5kqcosiTTDOzb4tD0VCWzMVGiREaSgh3YEEh91I4MnX5gOxTEzX5UzfsIgpPpkzvDsqMFCFTigo3piUXu8tunAeXLE8675sIoPbdK+HIlFkIP54XlzYYNkOTKRoaBsOmP5ncgvPSX/EOrpaZS7EI/wJsThghRsqgUu6FUGTWPAcECz+d9JRKQs8Ylsyqh9651wgiMwlzhZ63WOxsauTRm1W/pWm9PEg4yKRL5FJzlNY0rVVbUc1Sf+L+HYpMoq6OM6D9VLpciNGRPCRZrDhz09KjDpHqgtFDkjnyZQjcGkQmpGnmLdWOO24u8ZOBn0ln8EWkfMJ4F9hDeSCo1f07DJlYr8BjaTiIjvFDHvFjgSXkiPLghGWMCGhxyTiHKa3HAsls00H9xGY+s7kBsFIHTPU2xAbQxBesAaf6BEiCCUXun2HIrLLju8D2TyqUwuUJbZahEbJXYJrhyGQyrmmz9DhrDuClDpHkyRw8LCdt5soIi4KMqtidVawOk1z4N2C+G+gFB5x71y8GW5xRt6hVB3ymmchN3bsf8YXwZCLeuPoHWV2bVh7pGV+ckgVYpNdDkUkPCtnvW3PNsGfAOEwGZMmc952+glarg2tkFVIv23HDjZ4GdMZqmlgLZf6Hoe3tVmwfSDd4oUTfvTuL/w5P5oTnw32Eq9omXNAFoeBNIL9BM6VQZIL0GRh1DxgnMU2GTKobC1zF4ZAkGNTzArlOnFETveBcfGZtJBEXkGU5Ck8mWtD8+jAyVjm4F4a4m/MXfMkEWSfMnRI5wCZevZBMqAeaByZr6NJcQubF32VKRryVJn4R0eZCosS+WXgyUcwscmanh1rJOq++MGSSbCo5ZpUMclysB0DmEoriuJKkZRsNxF3qZgrNshZ7BwgrIfqD9WG6as26A3b1hieTqL8UJUfQ9d5oVWqi4DUm0w5DJvWyF
vcL5QP7WUAmu9oq5DqOhhZdCsmOEW4T5jZAqVg3WJJi2DdWnUySAQ4r4cmk7ik5r4ZxR61R1eqyGWWROOQQZGobImP0GeRpDlNEywOQyb08uY4ttsBqol6pJe4EtFpDe9mCIe/qXUEm2zjp1HwIHfYtad0XiUzfepAAWRYlM8sNQ9bgBLuFpTDI3ODz71iAphZuMekGvxdFcAWZfNt1U7fkG969klcFH4lM/+4Pdzslk3daxIqz+OGydl5xIt0puy2yARqAYMosDteQmegP+GEmj0Imqz/yQkBlUcj02i5hgYI/JZMvrUmnnpDpsRkwj93dooo5TMwqyWY8RV73KjITuiWOyHn8QkYUsRXlrqtIZEqeJQHqG1IyH7lhSKuekum1tdOMORtyn1oM8dQpO9FN16r2hyT8X0emrbRHbly7PIO9qDz347RTMRy6XGYikSldjAIEMvmzFDIyxYMGLtrx7poMpCqWAGwNzDuPNW2IQgJalCyZc8kAcjKd8zhWl9nZgh0XkLQUJ7NqIYcD0Sw6mXJ980COgZLJO0spmfZMpTvt41gTobAxkwScdoU9fDZi9e/SkpEEf8uDTAetUYfJcfBo9CxCfcQmRw/RyaQxs5j1xFhIgEKS6bzERNyUyXjN5hZA2grq3Ok49xFqGINVv1sHtiUNOT8yHaQNkF+hGI6d7IPAUDE6mTSwbXzeFOEKMhNOK77C88l76VsCqZg/1MID9dMH4vynLJkoMZFQtgwgE8yG1APonqYgiKqBa+vM4OZvZDJ1nEMOy0xwljS9bwa0XGVnIyGQlkUDziVZMptyDVDtgKZBvzTl3xRvkKLmKer3iPpYXUGmRguhYHOJRKZWqHbbD3SWugEaVTH22gvhahPEkfgDfjVMJtKyaOm4skZkpkuXrEfY3BoOgAo112dJtgQfriAzQQ+czLmDmuJqCU9mYdVxvRM0VHB0NdTe0zeB312WY+5BJtY9IRMdDisKkhZLZs2L9Cr4Ie1lR7iZE41McJiLDf1W0uJrs9Bk5siYzPntgvzyjWHwaiHodqhevMgkGxGYTByY+PVIlIPIRIaaFQpcxNPFGhFDwi5AD3sxGZnepZ1OyWSkLipososgvGXSjAd6N1of8zs0twSOJIIvc7RaHyF/hPo8vFekh1BJaYiGa3MsEReHYyZaBnyzjPHTOAZwIkOS9srIbJYkcC3RomQWKUeYHfKyDsKTSe0dnHTXaPNdOMJxSwhnqFwgPzh3LyM1LFkZcKCYTBnv9zWHcklMJvaUnK70LLyMLJB1ijrdO5ORKYWbD2vMpZpuY2iAYn9eIcEzPJk90PEopZ0x9VYJtLViPdFFnD5zWoVo341q2GvA8KXBrR9CZgu/2xz4LYsKkmwWu0rGIZMugbvI8V9QITXQnQ9NZod7K3Q730qk40XIZtljs9P5hvl7EG9/lh5oXyGvnxsRhdVdhknTgHywk0aOc2xd/pGdeeq6B+B6zHYFIbOPrzzU8CpKE72gMEkiD7ZNfYTMspRhlR+OTMnWIwO6/iKQmRP2fiACMstvB92kHD+UVkapS2dXxEGdtvOaRqG/mpGwNXJ/oWQOqRdMDuqTCbdSaZ1JT3FkJ3ZYa9aBnSBzBe08q1YoW0si4q6uyGRqvoqHJ60j1Jmen+o4iPsYydDzkyCwVD0+ASuhlwNh3n9Hn5KZm3sKkROPBQ+BvptfRyYz0fM5qQ6NKFLTwOf7hDi7eS5k30k6GIDemyaVKCVEMqkDZdB2lwwY0lOxAxq95TvKtcS1ZCZynoclmAwwWjvP03vH/HWCC6nO2LJB24gSjjcUyUykJSxZyLaZ3qzc3JcwE5N8tnX53OVaMqUb4jY2LGkRe7MeHwrGb5cXaMLWbZdvpWicNxmXLsmQhExxr9Y5QiEhM1FtJ3lMOQ3kOW9cd5Mh9wDCFWTavltiSBbX0cuRiXmSyWwN5SWhqn6/L+FbFWBPg6asK1ab0Joqa6HcyGhnMpk23/HplWiqMZ6MHFU1p7bggGsCaxZLZ8YQ/gOIYYWKFCdlpPWuPdp0TvRZyGZ8kOV6E/0m89TsTNjLyU2m7q1tvlFioCdNJ2xuY7DnWzbd/p0+g0dI11bNzGRpGXmvebRGle70YVkaBZ8AG+bLpdmyaRm1gLI5v1rW28XNIPtQqnlskOXtFDvb7a78jmNGRH9VWs6zg2y3U/2u/zmoUF5OsslxsZ3tVmp3+O9G/gco7PnhA4gKiQAAAABJRU5ErkJggg==\"","export default __webpack_public_path__ + \"static/media/Picture1.77be32de.png\";","export default __webpack_public_path__ + \"static/media/Picture2.22242a44.png\";","export default __webpack_public_path__ + \"static/media/Picture3.7f5cf0b7.png\";","export default __webpack_public_path__ + \"static/media/science_hub_banner3.bb002d4e.png\";","export default __webpack_public_path__ + \"static/media/apple_logo.b564e2ae.jpg\";","export default __webpack_public_path__ + \"static/media/servicenow_logo.be174726.png\";","export default __webpack_public_path__ + \"static/media/logo_v3.48bc1586.png\";","import React from 'react';\nimport { useLocation } from 'react-router-dom';\nimport Logo from '../../assets/logo_v3.png';\nimport './Header.scss';\n\nconst Header = () => {\n const year = useLocation().pathname.split('/')[1]\n return (\n
\n
\n \"logo\"\n {`${'CLeaR ' + year}`}\n
\n
\n )\n}\n\nexport default Header\n\n","export const navItems = [\n \"Venue and Registration\",\n \"Important Dates\",\n \"Reviewer Instructions\",\n \"AC Instructions\",\n \"Code of Conduct\",\n \"Conflicts of Interest\",\n \"Sponsors\",\n];\n\nexport const callItems = {\n '2022': [\n {\n dropdown: 'Calls',\n items: [\n 'Call for Papers',\n ]\n }\n ],\n '2023': [\n {\n dropdown: 'Calls',\n items: [\n 'Call for Papers',\n 'Call for Datasets',\n ]\n }\n ],\n '2024': [\n {\n dropdown: 'Calls',\n items: [\n 'Call for Papers',\n ]\n }\n ],\n '2025': [\n {\n dropdown: 'Calls',\n items: [\n 'Call for Papers',\n ]\n }\n ],\n\n}\n\n\nexport const programItems = {\n '2022': [\n {\n dropdown: 'Program',\n items: [\n 'Full Agenda',\n 'Plenary Speakers',\n 'Accepted Papers',\n ]\n },\n ],\n '2023': [\n {\n dropdown: 'Program',\n items: [\n 'Full Agenda',\n 'Plenary Speakers',\n 'Accepted Papers',\n 'Accepted Datasets',\n 'Presentation Instructions',\n 'Online Schedule',\n 'Social Activities',\n ]\n }\n ],\n '2024': [\n {\n dropdown: 'Program',\n items: [\n 'Full Agenda',\n ]\n }\n ],\n '2025': [\n {\n dropdown: 'Program',\n items: [\n 'Full Agenda',\n // 'Plenary Speakers',\n // 'Accepted Papers',\n // 'Accepted Datasets',\n // 'Presentation Instructions',\n // 'Online Schedule',\n // 'Social Activities',\n ]\n }\n ],\n\n\n}\n\n\n\n\nexport const yearItems = [\n {\n dropdown: 'Year',\n items: [\n '2025',\n '2024',\n '2023',\n '2022',\n ]\n },\n]\n\nexport const dropItems = {\n '2022': [\n {\n dropdown: 'Organizers',\n items: [\n 'Organizing Committee',\n 'Area Chairs',\n 'Program Committee',\n 'Advisory Board',\n ]\n },\n ],\n '2023': [\n {\n dropdown: 'Organizers',\n items: [\n 'Organizing Committee',\n 'Area Chairs',\n 'Advisory Board',\n 'Student Volunteers',\n ]\n },\n ],\n '2024': [\n {\n dropdown: 'Organizers',\n items: [\n 'Organizing Committee',\n 'Area Chairs',\n 'Advisory Board',\n 'Student Volunteers',\n ]\n },\n ],\n '2025': [\n {\n dropdown: 'Organizers',\n items: [\n 'Organizing Committee',\n 'Area Chairs',\n 'Advisory Board',\n 'Student Volunteers',\n ]\n },\n ]\n\n}\n\n\n","import React from 'react';\nimport { NavLink, useLocation } from 'react-router-dom';\nimport './NavItem.scss';\n\nconst NavItem = ({children}) => {\n const year = useLocation().pathname.split('/')[1]\n return (\n
  • \n {children}\n
  • \n )\n}\n\nexport default NavItem\n","/* eslint-disable jsx-a11y/anchor-is-valid */\nimport React from \"react\";\nimport { Link, useLocation } from 'react-router-dom';\nimport './DropdownItem.scss'\n\nexport const YearItem = ({dropdown, items}) => {\n return (\n
    \n \n {dropdown}\n \n
    \n {items.map((item) => (\n \n {item}\n \n ))}\n
    \n
    \n );\n};\n\nexport const DropdownItem = ({dropdown, items}) => {\n const year = useLocation().pathname.split('/')[1]\n return (\n
    \n \n {dropdown}\n \n
    \n {items.map((item) => (\n \n {item}\n \n ))}\n
    \n
    \n );\n};\n\n","import React from 'react'\nimport { useLocation } from 'react-router-dom';\nimport { navItems, callItems, programItems, yearItems, dropItems } from '../../common/constants/navItems'\nimport NavItem from './components/NavItem/NavItem'\nimport { YearItem, DropdownItem } from './components/DropdownItem/DropdownItem'\nimport './NavBar.scss'\n\nconst NavBar = () => {\n const year = useLocation().pathname.split('/')[1]\n yearItems[0].dropdown = 'Year (' + year + ')'\n // Before redirecting, year may be null string or another string that\n // is not equal to those in yearItems[0].items\n if (year && yearItems[0].items.includes(year)) {\n return (\n
    \n \n
    \n )\n } else {\n return (\n
    \n \n
    \n )\n }\n\n}\n\nexport default NavBar\n\n\n\n","import React from \"react\";\nimport \"./DatesTable.scss\";\n\nconst DatesTable = ({array}) => {\n return (\n \n \n \n \n \n \n \n \n {array.map(({ name, date }) => (\n \n \n \n \n ))}\n \n
    NameDate
    {name}{date}
    \n );\n};\n\nexport default DatesTable;\n","import React from 'react'\nimport './Title.scss'\n\nconst Title = ({children}) => {\n return (\n

    \n {children}\n

    \n )\n}\nexport default Title\n","export const importantDates2022 = [\n { name: \"Paper submission deadline\", date: \" Oct 25, 2021 11:59pm (Anywhere on Earth, AoE)\" },\n { name: \"Reviews released\", date: \" Nov 26, 2021 \" },\n { name: \"Author rebuttals due\", date: \" Dec 3, 2021 11:59pm (AoE)\" },\n { name: \"Final decisions\", date: \" Jan 12, 2022 \" },\n { name: \"Camera ready deadline\", date: \" Feb 20, 2022 11:59pm (AoE)\" },\n { name: \"Main conference\", date: \"Apr 11 - Apr 13, 2022\" },\n];\n\nexport const importantDates2023 = [\n { name: \"Paper submission deadline\", date: \" Oct 28, 2022 11:59pm (Anywhere on Earth, AoE)\" },\n { name: \"Reviews released\", date: \" Dec 2, 2022 \" },\n { name: \"Author rebuttals due\", date: \" Dec 9, 2022 11:59pm (AoE)\" },\n { name: \"Final decisions\", date: \" Jan 12, 2023 \" },\n { name: \"Camera ready deadline\", date: \" Feb 20, 2023 11:59pm (AoE)\" },\n { name: \"Main conference\", date: \"Apr 11 - Apr 14, 2023\" },\n { name: \"Final Camera ready deadline\", date: \" May 26, 2023 11:59pm (AoE)\" },\n];\n\nexport const importantDates2024 = [\n { name: \"Paper submission deadline\", date: \" Oct 27, 2023 11:59pm (Anywhere on Earth, AoE)\" },\n { name: \"Reviews released\", date: \" Dec 1, 2023 \" },\n { name: \"Author rebuttals due\", date: \" Dec 8, 2023 11:59pm (AoE)\" },\n { name: \"Final decisions\", date: \" Jan 12, 2024 \" },\n { name: \"Camera ready deadline\", date: \" Feb 20, 2024 11:59pm (AoE)\" },\n { name: \"Main conference\", date: \"Apr 1 - Apr 3, 2024\" },\n];\n\nexport const importantDates2025 = [\n { name: \"Paper submission deadline\", date: \" Nov 4, 2024 11:59pm (Anywhere on Earth, AoE)\" },\n { name: \"Reviews released\", date: \" Dec 13, 2024 \" },\n { name: \"Author rebuttals due\", date: \" Dec 23, 2024 11:59pm (AoE)\" },\n { name: \"Final decisions\", date: \" Jan 27, 2025 \" },\n { name: \"Camera ready deadline\", date: \" Mar 9, 2025 11:59pm (AoE)\" },\n { name: \"Main conference\", date: \"May 7 (Wed) - 9 (Fri), 2025\" },\n];\n","import React from 'react'\nimport DatesTable from './components/DateTable/DatesTable'\nimport Title from '../../../components/Title/Title'\nimport { importantDates2022 } from '../../../common/constants/importantDates'\n\nconst ImportantDates2022page = () => {\n return (\n <>\n Important Dates\n \n \n )\n}\n\nexport default ImportantDates2022page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
    \n {array.map(({title, author, description}) => (\n \n

    {title}

    \n {author}\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","const acceptedPapers = [\n {\n title: \"Causal Explanations and XAI\",\n author:\n \"Sander Beckers (University of Tuebingen)\",\n },\n {\n title: \"Differentially Private Estimation of Heterogeneous Causal Effects\",\n author:\n \"Fengshi Niu (Stanford University), Harsha Nori (Microsoft), Brian Quistorff (University of Maryland, College Park), Rich Caruana (Carnegie Mellon University), Donald Ngwe (Microsoft), Aadharsh Kannan (Microsoft)\",\n },\n {\n title: \"Evidence-Based Policy Learning\",\n author:\n \"Jann Spiess (Stanford University), Vasilis Syrgkanis (Microsoft)\",\n },\n {\n title: \"Interactive rank testing by betting\",\n author:\n \"Boyan Duan (Carnegie Mellon University), Aaditya Ramdas (Carnegie Mellon University), Larry Wasserman (Carnegie Mellon University)\",\n },\n {\n title: \"Learning Causal Overhypotheses through Exploration in Children and Computational Models\",\n author:\n \"Eliza Kosoy (University of California Berkeley), Jasmine L Collins (University of California Berkeley), David Chan (University of California Berkeley), Jessica B Hamrick (DeepMind), Nan Rosemary Ke (DeepMind), Sandy Huang (DeepMind), adrian liu (University of California, Berkeley), John Canny (University of California, Berkeley), Alison Gopnik (University of California, Berkeley)\",\n },\n {\n title: \"Non-parametric Inference Adaptive to Intrinsic Dimension\",\n author:\n \"Khashayar Khosravi (Google), Greg Lewis (Microsoft), Vasilis Syrgkanis (Microsoft)\",\n },\n {\n title: \"Optimal Training of Fair Predictive Models\",\n author:\n \"Razieh Nabi (Emory University), Daniel Malinsky (Columbia University), Ilya Shpitser (Johns Hopkins University)\",\n }, \n {\n title: \"Selection, Ignorability and Challenges With Causal Fairness\",\n author:\n \"Jake Fawkes (University of Oxford), Robin Evans (University of Oxford), Dino Sejdinovic (University of Oxford)\",\n },\n {\n title: \"Typing assumptions improve identification in causal discovery\",\n author:\n \"PHILIPPE BROUILLARD (University of Montreal), Perouz Taslakian (Samsung), Alexandre Lacoste (Element AI), Sebastien Lachapelle (University of Montreal), Alexandre Drouin (Laval University)\",\n },\n];\n\nexport default acceptedPapers;\n","const acceptedPapers = [\n {\n title: \"A Distance Covariance-based Kernel for Nonlinear Causal Clustering in Heterogeneous Populations\",\n author:\n \"Alex Markham (KTH Royal Institute of Technology), Richeek Das (Indian Institute of Technology), Moritz Grosse-Wentrup (University of Vienna)\",\n }, \n {\n title: \"A Multivariate Causal Discovery based on Post-Nonlinear Model\",\n author:\n \"Kento Uemura (RIKEN), Takuya Takagi (Fujitsu Ltd.), Takayuki Kambayashi, Hiroyuki Yoshida, Shohei Shimizu (Shiga University)\",\n },\n {\n title: \"A Uniformly Consistent Estimator of non-Gaussian Causal Effects Under the $k$-Triangle-Faithfulness Assumption\",\n author:\n \"Shuyan Wang (Carnegie Mellon University), Peter Spirtes (Carnegie Mellon University)\",\n },\n {\n title: \"Amortized Causal Discovery: Learning to Infer Causal Graphs from Time-Series Data\",\n author:\n \"Sindy Löwe (University of Amsterdam), David Madras (University of Toronto), Richard Zemel (Columbia University), Max Welling (University of Amsterdam)\",\n }, \n {\n title: \"Attainability and Optimality: The Equalized Odds Fairness Revisited\",\n author:\n \"Zeyu Tang (Carnegie Mellon University), Kun Zhang (Carnegie Mellon University)\",\n },\n {\n title: \"Bivariate Causal Discovery via 
Conditional Divergence\",\n author:\n \"Bao Duong (Deakin University), Thin Nguyen (Deakin University)\",\n }, \n {\n title: \"Can Humans Be out of the Loop?\",\n author:\n \"Junzhe Zhang (Columbia University), Elias Bareinboim (Columbia University)\",\n },\n {\n title: \"Causal Bandits without prior knowledge using separating sets\",\n author:\n \"Arnoud De Kroon (University of Amsterdam), Joris Mooij (University of Amsterdam), Danielle Belgrave (DeepMind)\",\n },\n {\n title: \"Causal Discovery for Linear Mixed Data\",\n author:\n \"Yan Zeng (Tsinghua University), Shohei Shimizu (Shiga University), Hidetoshi Matsui (Kyushu University), Fuchun Sun (Tsinghua University)\",\n }, \n {\n title: \"Causal Discovery in Linear Structural Causal Models with Deterministic Relations\",\n author:\n \"Yuqin Yang (Georgia Institute of Technology), Mohamed S Nafea (University of Detroit Mercy), AmirEmad Ghassami (Johns Hopkins University), Negar Kiyavash (Swiss Federal Institute of Technology Lausanne)\",\n },\n {\n title: \"Causal Imputation via Synthetic Interventions\",\n author:\n \"Chandler Squires (MIT), Dennis Shen (MIT), Anish Agarwal (MIT), Devavrat Shah (MIT), Caroline Uhler (MIT)\",\n },\n {\n title: \"Causal Markov Decision Processes: Learning Good Interventions Efficiently\",\n author:\n \"Yangyi Lu (University of Michigan), Amirhossein Meisami (University of Michigan), Ambuj Tewari (University of Michigan)\",\n }, \n {\n title: \"Causal Structure Discovery between Clusters of Nodes Induced by Latent Factors\",\n author:\n \"Chandler Squires (Massachusetts Institute of Technology), Annie Yun (MIT), Eshaan Nichani (Princeton University), Raj Agrawal (MIT), Caroline Uhler (MIT)\",\n },\n {\n title: \"CausalCity: Complex Simulations with Agency for Causal Discovery and Reasoning\",\n author:\n \"Daniel McDuff (Microsoft), Yale Song (Microsoft), Jiyoung Lee (Yonsei university), Vibhav Vineet (Microsoft), Sai Vemprala (Microsoft), Nicholas Alexander Gyde (University of Washington), Hadi Salman (Massachusetts Institute of Technology), Shuang Ma (Microsoft), Kwanghoon Sohn (Yonsei University), Ashish Kapoor (Microsoft)\",\n },\n {\n title: \"Data-driven exclusion criteria for instrumental variable studies\",\n author:\n \"Tony Liu (University of Pennsylvania), Patrick Lawlor, Lyle Ungar (University of Pennsylvania), Konrad Kording (University of Pennsylvania)\",\n },\n {\n title: \"Differentiable Causal Discovery Under Latent Interventions\",\n author:\n \"Gonçalo Rui Alves Faria (Instituto Superior Técnico), Andre Martins (Instituto Superior Técnico), Mario A. T. Figueiredo (Instituto Superior Técnico)\",\n },\n {\n title: \"Diffusion Causal Models for Counterfactual Estimation\",\n author:\n \"Pedro Sanchez (University of Edinburgh), Sotirios A. 
Tsaftaris (University of Edinburgh)\",\n }, \n {\n title: \"Disentanglement via Mechanism Sparsity Regularization: A New Principle for Nonlinear ICA\",\n author:\n \"Sebastien Lachapelle (University of Montreal), Pau Rodriguez (Element AI), Rémi LE PRIOL (University of Montreal), Yash Sharma (University of Tuebingen), Katie E Everett (Massachusetts Institute of Technology), Alexandre Lacoste (Element AI), Simon Lacoste-Julien (University of Montreal)\",\n },\n {\n title: \"Disentangling Controlled Effects for Hierarchical Reinforcement Learning\",\n author:\n \"Oriol Corcoll Andreu (University of Tartu), Raul Vicente (University of Tartu)\",\n }, \n {\n title: \"Equality Constraints in Linear Hawkes Processes\",\n author:\n \"Søren Wengel Mogensen (Lund University / Lund Institute of Technology)\",\n },\n {\n title: \"Estimating Social Influence from Observational Data\",\n author:\n \"Dhanya Sridhar (Columbia University), Caterina De Bacco (Max-Planck Institute), David Blei (Columbia University)\",\n },\n {\n title: \"Fair Classification with Instance-dependent Label Noise\",\n author:\n \"Songhua Wu (University of Sydney), Mingming Gong (University of Melbourne), Bo Han (HKBU), Yang Liu (University of California, Santa Cruz), Tongliang Liu (University of Sydney)\",\n }, \n {\n title: \"Identifying cause versus effect in time series via spectral independence: theoretical foundations\",\n author:\n \"Michel Besserve (MPI for Intelligent Systems), Naji Shajarisales (Carnegie Mellon University), Dominik Janzing (Amazon), Bernhard Schölkopf (Max Planck Institute for Intelligent Systems)\",\n },\n {\n title: \"Identifying Coarse-grained Independent Causal Mechanisms with Self-supervision\",\n author:\n \"Xiaoyang Wang (University of Illinois, Urbana Champaign), Klara Nahrstedt (UIUC), Oluwasanmi O Koyejo (UIUC)\",\n },\n {\n title: \"Identifying Principal Stratum Causal Effects Conditional on a Post-treatment Intermediate Response\",\n author:\n \"Xiaoqing Tan (University of Pittsburgh), Judah Abberbock, Priya Rastogi (University of Pittsburgh), Gong Tang (University of Pittsburgh)\",\n },\n {\n title: \"Info Intervention and its Causal Calculus\",\n author:\n \"Gong Heyang (University of Science and Technology of China)\",\n }, \n {\n title: \"Integrative $R$-learner of heterogeneous treatment effects combining experimental and observational studies\",\n author:\n \"Lili Wu (North Carolina State University), Shu Yang (North Carolina State University)\",\n },\n {\n title: \"Learning Invariant Representations with Missing Data\",\n author:\n \"Mark Goldstein (New York University), Joern-Henrik Jacobsen (Vector Institute), Olina Chau (Stanford University), Adriel Saporta (Stanford University), Aahlad Manas Puli (New York University), Rajesh Ranganath (New York University), Andrew Miller (Columbia University)\",\n },\n {\n title: \"Local Constraint-Based Causal Discovery under Selection Bias\",\n author:\n \"Philip Versteeg (University of Amsterdam), Joris Mooij (University of Amsterdam), Cheng Zhang (Microsoft)\",\n },\n {\n title: \"On the Equivalence of Causal Models\",\n author:\n \"Jun Otsuka (Kyoto University), Hayato Saigo\",\n }, \n {\n title: \"Partial Identification with Noisy Covariates: A Robust Optimization Approach\",\n author:\n \"Wenshuo Guo (University of California Berkeley), Mingzhang Yin (Columbia University), Yixin Wang (University of Michigan), Michael Jordan (University of California Berkeley)\",\n },\n {\n title: \"Predictive State Propensity Subclassification: A causal 
inference algorithm for optimal data-driven propensity score stratification\",\n author:\n \"Joseph Kelly (Google), Jing Kong (Google), Georg M. Goerg (EvolutionIQ)\",\n }, \n {\n title: \"Process Independence Testing in Proximal Graphical Event Models\",\n author:\n \"Debarun Bhattacharjya (IBM), Karthikeyan Shanmugam (IBM), Tian Gao (IBM), Dharmashankar Subramanian (IBM)\",\n }, \n {\n title: \"Relational Causal Models with Cycles: Representation and Reasoning\",\n author:\n \"Ragib Ahsan (University of Illinois, Chicago), David Arbour (Adobe Systems), Elena Zheleva (University of Illinois, Chicago)\",\n },\n {\n title: \"Same Cause; Different Effects in the Brain\",\n author:\n \"Mariya Toneva (Carnegie Mellon University), Jennifer Williams (Carnegie Mellon University), Anand Bollu (Carnegie Mellon University), Leila Wehbe (Carnegie Mellon University)\",\n },\n {\n title: \"Simple data balancing achieves competitive worst-group-accuracy\",\n author:\n \"Badr Youbi Idrissi (Facebook), Martin Arjovsky (New York University), Mohammad Pezeshki (University of Montreal), David Lopez-Paz (Facebook)\",\n }, \n {\n title: \"Some Reflections on Drawing Causal Inference using Textual Data: Parallels Between Human Subjects and Organized Texts\",\n author:\n \"Bo Zhang (University of Pennsylvania), Jiayao Zhang (University of Pennsylvania)\",\n },\n {\n title: \"Towards efficient representation identification in supervised learning\",\n author:\n \"Kartik Ahuja (Montreal Institute for Learning Algorithms), Divyat Mahajan (Montreal Institute for Learning Algorithms), Vasilis Syrgkanis (Microsoft), Ioannis Mitliagkas (University of Montreal)\",\n },\n {\n title: \"VIM: Variational Independent Modules for Video Prediction\",\n author:\n \"Rim Assouel (University of Montreal), Lluis Castrejon (University of Montreal), Aaron Courville (University of Montreal), Nicolas Ballas (Facebook), Yoshua Bengio (University of Montreal)\",\n },\n {\n title: \"Weakly Supervised Discovery of Semantic Attributes\",\n author:\n \"Ameen Ali Ali (Tel Aviv University), Tomer Galanti (DeepMind), Evgenii Zheltonozhskii (Technion), Chaim Baskin (Technion), Lior Wolf (Tel Aviv University)\",\n },\n];\n\nexport default acceptedPapers;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedPapers_oral from \"../../../common/constants/acceptedPapers_oral\";\nimport acceptedPapers_poster from \"../../../common/constants/acceptedPapers_poster\";\n\nconst AcceptedPapers2022page = () => {\n return (\n
    \n Accepted Papers\n
    Oral
    \n \n
    Poster
    \n \n
    \n\n );\n};\n\nexport default AcceptedPapers2022page;\n","import React from \"react\";\nimport \"./OCTable.scss\";\n\nconst OCTable = ({ array }) => {\n return (\n \n {array.map(({ position, persons }) => (\n \n \n \n ))}\n
    \n

    {position}

    \n {persons.map(({ name, href, address }) => (\n
    \n \n {name}\n \n
    \n {address}\n
    \n ))}\n
    \n );\n};\n\nexport default OCTable;\n","export const array2022 = [\n {\n position: \"General and Program Chairs\",\n persons: [\n {\n name: \"Bernhard Schölkopf\",\n href: \"https://www.is.mpg.de/~bs\",\n address: \"Max Planck Institute for Intelligent Systems, Germany\",\n },\n {\n name: \"Caroline Uhler\",\n href: \"https://www.carolineuhler.com/\",\n address: \"Massachusetts Institute of Technology, USA\",\n },\n {\n name: \"Kun Zhang\",\n href: \"http://www.andrew.cmu.edu/user/kunz1/\",\n address: \"Carnegie Mellon University, USA\",\n },\n ],\n },\n {\n position: \"Logistics and Conference Planning Team\",\n persons: [\n {\n name: \"Silvia Chiappa (Co-Chair)\",\n href: \"https://csilviavr.github.io/\",\n address: \"DeepMind, UK\",\n },\n {\n name: \"Frederick Eberhardt (Co-Chair)\",\n href: \"http://www.its.caltech.edu/~fehardt/\",\n address: \"California Institute of Technology, USA\",\n },\n {\n name: \"Patrick Burauel\",\n href: \"https://patrick-burauel.netlify.app/\",\n address: \"California Institute of Technology, USA\",\n },\n {\n name: \"Julius von Kügelgen\",\n href: \"https://sites.google.com/view/julius-von-kuegelgen/home\",\n address: \"Max Planck Institute for Intelligent Systems, Germany\",\n },\n ],\n },\n {\n position: \"Publication Chairs\",\n persons: [\n {\n name: \"Zhijing Jin\",\n href: \"https://zhijing-jin.com/fantasy/\",\n address: \"Max Planck Institute for Intelligent Systems, Germany\",\n },\n {\n name: \"Alexander D'Amour\",\n href: \"https://www.alexdamour.com/\",\n address: \"Google Brain, USA\",\n },\n ],\n },\n {\n position: \"Communication Chairs\",\n persons: [\n {\n name: \"Liam Solus\",\n href: \"https://people.kth.se/~solus/\",\n address: \"KTH Royal Institute of Technology, Sweden\",\n },\n {\n name: \"Nan Rosemary Ke\",\n href: \"https://nke001.github.io/\",\n address: \"Mila, Canada\",\n },\n ],\n },\n {\n position: \"Sponsorship Chairs\",\n persons: [\n {\n name: \"Sara Magliacane\",\n href: \"https://smaglia.wordpress.com/\",\n address: \"University of Amsterdam, Netherlands\",\n },\n {\n name: \"Francesco Locatello\",\n href: \"https://scholar.google.com/citations?user=wQanfTIAAAAJ&hl=en\",\n address: \"Institute of Science and Technology, Austria\",\n },\n ],\n },\n {\n position: \"Social Chairs\",\n persons: [\n {\n name: \"Stefan Bauer\",\n href: \"https://scholar.google.com/citations?user=O-oICE8AAAAJ&hl=de\",\n address: \"KTH Royal Institute of Technology, Sweden\",\n },\n {\n name: \"Atalanti Mastakouri\",\n href: \"https://www.is.mpg.de/person/amastakouri\",\n address: \"Amazon Research Tübingen, Germany\",\n },\n ],\n },\n {\n position: \"Workflow Chairs\",\n persons: [\n {\n name: \"Biwei Huang\",\n href: \"https://biweihuang.com/\",\n address: \"Carnegie Mellon University, USA\",\n },\n {\n name: \"Daniel Malinsky\",\n href: \"http://www.dmalinsky.com/\",\n address: \"Columbia University, USA\",\n },\n ],\n },\n {\n position: \"Online Chairs\",\n persons: [\n {\n name: \"Christina Heinze-Deml\",\n href: \"https://stat.ethz.ch/~heinzec/\",\n address: \"ETH Zurich, Switzerland\",\n },\n {\n name: \"Sebastian Weichwald\",\n href: \"https://sweichwald.de/\",\n address: \"University of Copenhagen, Denmark\",\n },\n ],\n },\n {\n position: \"Publicity Chair\",\n persons: [\n {\n name: \"Mingming Gong\",\n href: \"https://mingming-gong.github.io/\",\n address: \"The University of Melbourne, Australia\",\n },\n ],\n },\n];\n\nexport const array2023 = [\n {\n position: \"General Chairs\",\n persons: [\n {\n name: \"Francesco 
Locatello\",\n href: \"https://www.francescolocatello.com/\",\n address: \"Institute of Science and Technology, Austria\",\n },\n {\n name: \"Peter Spirtes\",\n href: \"https://www.cmu.edu/dietrich/philosophy/people/faculty/spirtes.html\",\n address: \"CMU\",\n },\n ],\n },\n {\n position: \"Program Chairs\",\n persons: [\n {\n name: \"Mihaela van der Schaar\",\n href: \"https://www.vanderschaar-lab.com/\",\n address: \"University of Cambridge\",\n },\n {\n name: \"Cheng Zhang\",\n href: \"https://cheng-zhang.org/\",\n address: \"Microsoft Research\",\n },\n {\n name: \"Dominik Janzing\",\n href: \"https://janzing.github.io/\",\n address: \"Amazon Web Services\",\n },\n ],\n },\n {\n position: \"Sponsorship Chairs\",\n persons: [\n {\n name: \"Alexandre d'Amour\",\n href: \"https://www.alexdamour.com/\",\n address: \"Google\",\n },\n {\n name: \"Dhanya Sridhar\",\n href: \"https://www.dsridhar.com/\",\n address: \"University of Montreal\",\n },\n ],\n },\n {\n position: \"Social Chairs\",\n persons: [\n {\n name: \"Atalanti Mastakouri\",\n href: \"https://atalanti.github.io/\",\n address: \"Amazon Web Services\",\n },\n {\n name: \"Julius von Kugelgen\",\n href: \"https://sites.google.com/view/julius-von-kuegelgen/home\",\n address: \"Max Planck Institute for Intelligent Systems\",\n },\n ],\n },\n {\n position: \"Publication Chairs\",\n persons: [\n {\n name: \"Michel Besserve\",\n href: \"https://ei.is.mpg.de/person/besserve\",\n address: \"Max Planck Institute for Intelligent Systems\",\n },\n {\n name: \"Amit Sharma\",\n href: \"http://www.amitsharma.in/\",\n address: \"Microsoft Research\",\n },\n ],\n },\n {\n position: \"Communication Chairs\",\n persons: [\n {\n name: \"Sara Magliacane\",\n href: \"https://smaglia.wordpress.com/\",\n address: \"University of Amsterdam\",\n },\n {\n name: \"Biwei Huang\",\n href: \"https://biweihuang.com/\",\n address: \"UC San Diego\",\n },\n ],\n },\n {\n position: \"Workflow Chairs\",\n persons: [\n {\n name: \"Elena Zheleva\",\n href: \"https://www.cs.uic.edu/~elena/\",\n address: \"University of Illinois at Chicago\",\n },\n {\n name: \"Daniel Malinsky\",\n href: \"http://www.dmalinsky.com/\",\n address: \"Columbia University\",\n },\n ],\n },\n {\n position: \"Logistics and Conference Planning Team\",\n persons: [\n {\n name: \"Chaochao Lu\",\n href: \"https://causallu.com/\",\n address: \"University of Cambridge\",\n },\n {\n name: \"Dominik Zietlow\",\n href: \"https://scholar.google.de/citations?user=jkIx0f8AAAAJ&hl=de\",\n address: \"Amazon Web Services\",\n },\n {\n name: \"Jeroen Berrevoets\",\n href: \"https://www.maths.cam.ac.uk/person/jb2384\",\n address: \"University of Cambridge\",\n },\n ],\n },\n {\n position: \"Online Chairs\",\n persons: [\n {\n name: \"David Arbour\",\n href: \"http://darbour.github.io/\",\n address: \"Adobe\",\n },\n {\n name: \"Johann Brehmer\",\n href: \"https://johannbrehmer.github.io/\",\n address: \"Qualcomm AI Research Amsterdam\",\n },\n ],\n },\n {\n position: \"Publicity Chairs\",\n persons: [\n {\n name: \"Ignavier Ng\",\n href: \"https://ignavierng.github.io/\",\n address: \"CMU\",\n },\n {\n name: \"Guangyi Chen\",\n href: \"https://chengy12.github.io/\",\n address: \"MBZUAI and CMU\",\n },\n ],\n },\n {\n position: \"Dataset Chairs\",\n persons: [\n {\n name: \"Ricardo Monti\",\n href: \"http://www.gatsby.ucl.ac.uk/~ricardom/\",\n address: \"University College London\",\n },\n {\n name: \"Chao Ma\",\n href: \"https://chao-ma.org/\",\n address: \"University of Cambridge\",\n },\n ],\n 
},\n];\n\nexport const array2024 = [\n {\n position: \"General Chairs\",\n persons: [\n {\n name: \"Aditya Grover\",\n href: \"https://aditya-grover.github.io/\",\n address: \"UCLA\",\n },\n {\n name: \"Cheng Zhang\",\n href: \"https://cheng-zhang.org/\",\n address: \"Microsoft Research\",\n },\n ],\n },\n {\n position: \"Program Chairs\",\n persons: [\n {\n name: \"Vanessa Didelez\",\n href: \"https://people.maths.bris.ac.uk/~maxvd/\",\n address: \"University of Bremen\",\n },\n {\n name: \"Francesco Locatello\",\n href: \"https://www.francescolocatello.com/\",\n address: \"Institute of Science and Technology, Austria\",\n },\n ],\n },\n {\n position: \"Sponsorship Chairs\",\n persons: [\n {\n name: \"Silvia Chiappa\",\n href: \"https://csilviavr.github.io/\",\n address: \"DeepMind, UK\",\n },\n {\n name: \"Wei Chen\",\n href: \"https://weichen-cas.github.io\",\n address: \"Chinese Academy of Sciences\",\n },\n {\n name: \"Dhanya Sridhar\",\n href: \"https://www.dsridhar.com\",\n address: \"Université de Montréal\",\n },\n ],\n },\n {\n position: \"Social Chairs\",\n persons: [\n {\n name: \"Zachary Lipton\",\n href: \"https://www.zacharylipton.com/\",\n address: \"CMU\",\n },\n {\n name: \"Elena Zheleva\",\n href: \"https://www.cs.uic.edu/~elena/\",\n address: \"University of Illinois at Chicago\",\n },\n {\n name: \"Fiona Guo\",\n address: \"University of Southern California\",\n },\n ],\n },\n {\n position: \"Publication Chairs\",\n persons: [\n {\n name: \"Joris Mooij\",\n href: \"https://staff.fnwi.uva.nl/j.m.mooij/\",\n address: \"University of Amsterdam\",\n },\n {\n name: \"Sourbh Bhadane\",\n href: \"http://sourbhbh.github.io\",\n address: \"Cornell University\",\n },\n ],\n },\n {\n position: \"Communication Chairs\",\n persons: [\n {\n name: \"Mingming Gong\",\n href: \"https://mingming-gong.github.io\",\n address: \"University of Melbourne\",\n },\n {\n name: \"Matt Kusner\",\n href: \"https://mkusner.github.io\",\n address: \"University College London\",\n },\n ],\n },\n {\n position: \"Workflow Chairs\",\n persons: [\n {\n name: \"Virginia Aglietti\",\n href: \"https://virgiagl.github.io/\",\n address: \"DeepMind\",\n },\n {\n name: \"Vasilis Syrgkanis\",\n href: \"https://vsyrgkanis.com/\",\n address: \"Stanford University\",\n },\n {\n name: \"Ruibo Tu\",\n href: \"https://www.kth.se/profile/ruibo?l=en\",\n address: \"KTH Royal Institute of Technology\",\n },\n ],\n },\n {\n position: \"Logistics and Conference Planning Team\",\n persons: [\n {\n name: \"Krikamol Muandet\",\n href: \"https://www.krikamol.org/\",\n address: \"CISPA Helmholtz Center for Information Security\",\n },\n {\n name: \"Chi Zhang\",\n href: \"https://web.cs.ucla.edu/~zccc/\",\n address: \"UCLA\",\n },\n ],\n },\n {\n position: \"Online Chairs\",\n persons: [\n {\n name: \"Gaurav Sinha\",\n href: \"https://sinhagaurav.github.io/\",\n address: \"Microsoft Research India\",\n },\n {\n name: \"Biwei Huang\",\n href: \"https://biweihuang.com\",\n address: \"UCSD\",\n },\n ],\n },\n {\n position: \"Website Chairs\",\n persons: [\n {\n name: \"Maggie Makar\",\n href: \"https://mymakar.github.io/\",\n address: \"University of Michigan, Ann Arbor\",\n },\n {\n name: \"Ang Li\",\n href: \"https://www.causalds.org\",\n address: \"Florida State University\",\n },\n ],\n },\n\n {\n position: \"Dataset Chairs\",\n persons: [\n {\n name: \"Ricardo Monti\",\n href: \"http://www.gatsby.ucl.ac.uk/~ricardom/\",\n address: \"University College London\",\n },\n {\n name: \"Cian Eastwood\",\n href: 
\"https://cianeastwood.github.io/\",\n address: \"University of Edinburgh and Max Planck Institute, Tübingen\",\n },\n ],\n },\n {\n position: \"Operations Chairs\",\n persons: [\n {\n name: \"Hritik Bansal\",\n },\n {\n name: \"Daniel Israel\",\n },\n {\n name: \"Shufan Li\",\n },\n {\n name: \"Tung Nguyen\",\n },\n {\n name: \"Prachee Sharma\",\n },\n {\n name: \"Jia-Hang Sha\",\n },\n {\n name: \"Siyan Zhao\",\n },\n ],\n },\n];\n\n\n\nexport const array2025 = [\n {\n position: \"General Chairs\",\n persons: [\n {\n name: \"Negar Kiyavash\",\n href: \"https://people.epfl.ch/negar.kiyavash?lang=en\",\n address: \"EPFL\",\n },\n {\n name: \"Jin Tian\",\n href: \"https://mbzuai.ac.ae/study/faculty/jin-tian/\",\n address: \"MBZUAI\",\n },\n ],\n },\n {\n position: \"Program Chairs\",\n persons: [\n {\n name: \"Mathias Drton\",\n href: \"https://www.professoren.tum.de/en/drton-mathias\",\n address: \"Technical University of Munich\",\n },\n {\n name: \"Biwei Huang\",\n href: \"https://biweihuang.com/\",\n address: \"University of California San Diego\",\n },\n ],\n },\n {\n position: \"Communication Chairs\",\n persons: [\n {\n name: \"Sara Magliacane\",\n href: \"https://saramagliacane.github.io/\",\n address: \"University of Amsterdam\",\n },\n {\n name: \"Zhijing Jing\",\n href: \"https://zhijing-jin.com/fantasy/\",\n address: \"University of Toronto\",\n },\n ],\n },\n {\n position: \"Workflow Chairs\",\n persons: [\n {\n name: \"Virginia Aglietti\",\n href: \"https://virgiagl.github.io/\",\n address: \"DeepMind\",\n },\n {\n name: \"Urmi Ninad\",\n href: \"https://climateinformaticslab.com/about/\",\n address: \"TU Berlin\",\n },\n {\n name: \"Yujia Zheng\",\n href: \"http://yjzheng.com/\",\n address: \"Carnegie Mellon University\",\n },\n ],\n },\n {\n position: \"Website Chairs\",\n persons: [\n {\n name: \"Maggie Makar\",\n href: \"https://mymakar.github.io/\",\n address: \"University of Michigan, Ann Arbor\",\n },\n {\n name: \"Zeyu Tang\",\n href: \"https://zeyu.one/\",\n address: \"Carnegie Mellon University\",\n },\n ],\n },\n {\n position: \"Online Chairs\",\n persons: [\n {\n name: \"Sadegh Khorasani\",\n href: \"https://people.epfl.ch/sadegh.khorasani?lang=en\",\n address: \"EPFL\",\n },\n {\n name: \"Nils Sturma\",\n href: \"https://nilssturma.github.io/\",\n address: \"TU Munich\",\n }\n ]\n },\n {\n position: \"Publication Chairs\",\n persons: [\n {\n name: \"Sourbh Bhadane\",\n href: \"http://sourbhbh.github.io/\",\n address: \"University of Amsterdam\",\n },\n {\n name: \"Thijs van Ommen\",\n href: \"https://www.uu.nl/staff/MOmmen\",\n address: \"Utrecht University\",\n },\n {\n name: \"Chandler Squires\",\n href: \"https://www.chandlersquires.com\",\n address: \"Carnegie Mellon University\",\n },\n ],\n },\n];\n\n","import React from 'react'\nimport Title from '../../../components/Title/Title'\nimport OCTable from './components/OCTable/OCTable'\nimport { array2022 } from '../../../common/constants/organizingCommittee'\n\n\nconst OrganizingCommittee2022page = () => {\n return (\n
    \n Organizing Committee\n \n
    \n )\n}\n\nexport default OrganizingCommittee2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ReviewerInstruction2022page = () => {\n return (\n
    \n Reviewer Instructions\n

    Thank you for agreeing to review for CLeaR 2022! Your assessment is vital to creating a high-quality program. This page provides the review guidelines that will help you write reviews efficiently and effectively.

    Main tasks

    1. Preparation (by Oct 22, 2021)
      • CLeaR 2022 is using the OpenReview system. Please create your OpenReview profile if you do not have one, and make sure it is up to date if you already have an account.
      • Reviewer invitations will be sent via noreply@openreview.net. Please accept the reviewer invitation before the expiry date.
      • Please read and agree to the CLeaR 2022 code of conduct and declare any conflicts of interest.
    2. Paper bidding and assignment checking (Oct 25, 2021 - Oct 30, 2021)
      • Please bid on the papers that fall into your area of expertise. Your bidding is an important input to the overall matching results.
      • Please check the assigned papers right after the paper assignment. If you do not feel qualified to review a paper or find potential conflicts of interest, please communicate with your AC as soon as possible.
    3. Write thorough and timely reviews (Nov 2, 2021 - Nov 22, 2021)
      • Please make your review as deep and detailed as possible. Superficial reviews are of little help in making final decisions. It is also important to treat each submission fairly and provide unbiased reviews.
      • A review form has been designed to facilitate the review process. Please refer to the “Review Form” section below for step-by-step instructions on how to answer each question in the form.
    4. Discuss with authors/fellow reviewers/ACs (Dec 4, 2021 - Dec 28, 2021)
      • Before the start of discussions, please carefully read the author responses with an open mind to avoid possible misunderstandings. Even if the authors' rebuttal does not change your opinion, please acknowledge that you have read and considered it.
      • A further discussion with the authors will be enabled during the discussion period. If you would like the authors to clarify anything further after reading the rebuttal, you can discuss with them on the paper's page.
      • All reviewers should actively participate in discussions with fellow reviewers and ACs to build a more comprehensive understanding of each paper. The discussions are especially important for borderline papers and papers with high-variance assessments. While engaging in the discussion, please be professional, polite, and keep an open mind. Although full consensus makes the final decision easier, it is not mandatory in the reviewing process, as different people may have different perspectives.
      • If you change your opinion during or after the discussion phase, please update your ratings and give specific reasons in the final comments.
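    The milestone dates in the list above have the same { name, date } shape as the importantDates constants defined earlier in this bundle, so they could be fed to the existing DatesTable component. A minimal sketch, assuming a hypothetical reviewerDates2022 constant that is not part of the actual site:

    // common/constants/reviewerDates.js (hypothetical illustration, not in the real repo)
    export const reviewerDates2022 = [
      { name: "Preparation", date: "by Oct 22, 2021" },
      { name: "Paper bidding and assignment checking", date: "Oct 25 - Oct 30, 2021" },
      { name: "Write thorough and timely reviews", date: "Nov 2 - Nov 22, 2021" },
      { name: "Discuss with authors/fellow reviewers/ACs", date: "Dec 4 - Dec 28, 2021" },
    ];

    // Usage, mirroring ImportantDates2022page:
    //   <DatesTable array={reviewerDates2022} />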
    Review form

    1. Summary. Summarize the main contributions of each paper. The contributions may be new problems, theories, methods, algorithms, applications, benchmarks, etc.
    2. Main review. Please provide an in-depth review of each paper by considering the following aspects:
      • Originality: Does the paper provide anything new, like a new problem or a new method? Is the novelty compared to existing works well justified? Is it possible that similar ideas have been studied but the paper does not cite them properly?
      • Significance: Does the paper address an important problem? How relevant are the results to the CLeaR community? Does the proposed theory or method significantly advance the state of the art? Do the results in the paper provide new insights into the research problem? Is this paper likely to have broad impact outside the CLeaR community, e.g., in natural/social science or engineering?
      • Technical quality: Is the proposed approach technically sound? Are claims substantiated by theoretical and/or empirical results? Are the derivations and proofs correct? Is the proposed method unnecessarily complicated? Are the hyperparameters tuned in an appropriate manner?
      • Clarity: Is the submission clearly written and well organized? Is the take-home message easily extractable from the paper? Is the motivation well explained with illustrations and examples? Are the technical details described rigorously? Are there so many typos that the paper becomes hard to read?
    3. Overall score. We use a 10-point scoring system for the overall assessment. Please select the category that best describes your assessment of the paper.
      • 10: Top 5% of accepted papers, seminal paper
      • 9: Top 15% of accepted papers, strong accept
      • 8: Top 50% of accepted papers, clear accept
      • 7: Good paper, accept
      • 6: Marginally above acceptance threshold
      • 5: Marginally below acceptance threshold
      • 4: Ok but not good enough - rejection
      • 3: Clear rejection
      • 2: Strong rejection
      • 1: Trivial or wrong
    4. Confidence score. Please select the category that best describes your confidence in your assessment of the submission.
      • 5: You are absolutely certain about your assessment. You are very familiar with the related work and checked the math/other details carefully.
      • 4: You are confident in your assessment, but not absolutely certain. It is unlikely, but not impossible, that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work.
      • 3: You are fairly confident in your assessment. It is possible that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • 2: You are willing to defend your assessment, but it is quite likely that you did not understand central parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • 1: Your assessment is an educated guess. The submission is not in your area or was difficult to understand. Math/other details were not carefully checked.
    \n
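    The two scales above are static data, so site maintainers could keep them in a small constants module in the same style as the site's existing common/constants files. The file name and export names below are hypothetical, a sketch rather than part of the current codebase:

    // common/constants/reviewScale.js (hypothetical module, mirroring the
    // existing common/constants pattern used for dates and accepted papers)
    export const overallScores = [
      { score: 10, label: "Top 5% of accepted papers, seminal paper" },
      { score: 9, label: "Top 15% of accepted papers, strong accept" },
      { score: 8, label: "Top 50% of accepted papers, clear accept" },
      { score: 7, label: "Good paper, accept" },
      { score: 6, label: "Marginally above acceptance threshold" },
      { score: 5, label: "Marginally below acceptance threshold" },
      { score: 4, label: "OK but not good enough - rejection" },
      { score: 3, label: "Clear rejection" },
      { score: 2, label: "Strong rejection" },
      { score: 1, label: "Trivial or wrong" },
    ];

    export const confidenceScores = [
      { score: 5, label: "Absolutely certain; math/other details checked carefully" },
      { score: 4, label: "Confident, but not absolutely certain" },
      { score: 3, label: "Fairly confident; details not carefully checked" },
      { score: 2, label: "Willing to defend, but central parts may be misunderstood" },
      { score: 1, label: "Educated guess; submission outside your area" },
    ];

    A page component could then render both arrays from one place instead of hard-coding the same text on both the 2022 and 2023 instruction pages.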
    Policies
    \n

    Confidentiality. By reviewing for CLeaR 2022, you must agree to keep all material and information related to the review confidential. In particular, you must not use ideas and results from submitted papers in your own research or distribute them to others. You should delete all reviewing material, such as the submitted code, at the end of the reviewing cycle. You should not talk about submissions or content related to the reviewing of submissions to anyone without prior approval from the program chairs.

    \n

    Double-blind reviewing. The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. However, author names will be visible to area chairs and program chairs. Authors are responsible for anonymizing their submissions. Submissions may not contain any identifying information that may violate the double-blind reviewing policy. If you are assigned a submission that is not adequately anonymized, please contact the corresponding AC. Also, you should not attempt to find out the identities of authors for any of your assigned submissions, e.g., by searching arXiv preprints. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers. Please do not disclose your identity to authors and fellow reviewers in the discussions.

    \n

    Dual Submissions. CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in or submitted to a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions as extended abstracts (5 pages or less), or to workshops or non-archival venues (without proceedings), will not be considered concurrent submissions. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. If you suspect that a submission assigned to you is a dual submission, or if you require further clarification, please contact the corresponding AC. Please see the Call for Papers for more information about dual submissions.

    \n

    Violations of formatting instructions. Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open access archival version of the full paper reviewed for CLeaR. If you are assigned a paper that is overlength or appears to violate the CLeaR proceedings format (e.g., by decreasing margins or font size, by removing some pre-fixed spaces, etc.), please notify the corresponding AC immediately.

    \n \n

    * Please also review the policies in the CLeaR 2022 Call for Papers.

    \n
    \n )\n}\n\nexport default ReviewerInstruction2022page\n","export const array = [\n {\n persons: [\n {\n name: \"Aapo Hyvärinen\",\n href: \"https://www.cs.helsinki.fi/u/ahyvarin/index.shtml\",\n address: \"University of Helsinki, Finland\",\n },\n {\n name: \"Andrea Rotnitzky\",\n href: \"https://www.hsph.harvard.edu/andrea-rotnitzky/\",\n address: \"Harvard University, USA\",\n },\n {\n name: \"Bernhard Schölkopf\",\n href: \"https://www.is.mpg.de/~bs\",\n address: \"Max Planck Institute for Intelligent Systems, Germany\",\n },\n {\n name: \"Bin Yu\",\n href: \"https://binyu.stat.berkeley.edu/\",\n address: \"UC Berkeley, USA\",\n },\n {\n name: \"David Heckerman\",\n href: \"http://heckerman.com/david/\",\n address: \"Amazon, USA\",\n },\n {\n name: \"James Robins\",\n href: \"https://www.hsph.harvard.edu/james-robins/\",\n address: \"Harvard University, USA\",\n },\n {\n name: \"Judea Pearl\",\n href: \"http://bayes.cs.ucla.edu/jp_home.html\",\n address: \"UCLA, USA\",\n },\n {\n name: \"Leon Bottou\",\n href: \"https://leon.bottou.org/\",\n address: \"Facebook AI Research, USA\",\n },\n ],\n },\n];\n\nexport const array1 = [\n {\n persons: [\n {\n name: \"Marloes Maathuis\",\n href: \"https://stat.ethz.ch/~mmarloes/\",\n address: \"ETH Zurich, Switzerland\",\n },\n {\n name: \"Max Welling\",\n href: \"https://staff.fnwi.uva.nl/m.welling/\",\n address: \"University of Amsterdam, Netherlands \",\n },\n {\n name: \"Peter Spirtes\",\n href: \"https://www.cmu.edu/dietrich/philosophy/people/faculty/spirtes.html\",\n address: \"Carnegie Mellon University, USA\",\n },\n {\n name: \"Susan Murphy\",\n href: \"http://people.seas.harvard.edu/~samurphy/\",\n address: \"Harvard University, USA\",\n },\n {\n name: \"Terry Sejnowski\",\n href: \"https://www.salk.edu/scientist/terrence-sejnowski/\",\n address: \"Salk Institute, USA \",\n },\n {\n name: \"Thomas Richardson\",\n href: \"https://sites.stat.washington.edu/tsr/website/inquiry/home.php\",\n address: \"University of Washington, USA\",\n },\n {\n name: \"Victor Chernozhukov\",\n href: \"http://www.mit.edu/~vchern/\",\n address: \"Massachusetts Institute of Technology, USA\",\n },\n {\n name: \"Yoshua Bengio\",\n href: \"https://yoshuabengio.org/\",\n address: \"University of Montreal, Canada\",\n },\n \n ],\n },\n];\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport OCTable from '../OrganizingCommitteePage/components/OCTable/OCTable'\nimport {array ,array1} from '../../../common/constants/advisoryboard'\n\nconst AdvisoryBoard2022page = () => {\n return (\n
    \n Advisory Board\n \n \n \n \n \n
    \n
    \n )\n}\n\nexport default AdvisoryBoard2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './SponsorsPage.scss';\n\nconst Sponsors2022page = () => {\n return (\n
    \n Sponsors\n
    \n
    Gold Sponsors
    \n \n \n \n \n \n \n
    \n
    Silver Sponsors
    \n \n \n \n \n \n
    \n
    Bronze Sponsors
    \n \n \n \n \n
    \n
    \n
    \n )\n}\n\nexport default Sponsors2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst AreaChair2022page = () => {\n return (\n
    \n Area Chairs\n {/* */}\n Alexander D'Amour (Google Brain)
    \n Anirudh Goyal (University of Montreal / Mila)
    \n Cosma Shalizi (Carnegie Mellon University)
    \n David Blei (Columbia University)
    \n David Lopez-Paz (Facebook AI)
    \n Dominik Rothenhäusler (Stanford University)
    \n Elias Bareinboim (Columbia University)
    \n Emilija Perković (University of Washington)
    \n Emre Kiciman (Microsoft Research)
    \n Francesco Locatello (Amazon AWS)
    \n Ilya Shpitser (Johns Hopkins University)
    \n Irina Higgins (DeepMind)
    \n Irina Rish (University of Montreal / Mila)
    \n Jiji Zhang (Hong Kong Baptist University)
    \n Jonas Peters (University of Copenhagen)
    \n Joris Mooij (University of Amsterdam)
    \n Michel Besserve (Max Planck Institute for Intelligent Systems / Biological Cybernetics)
    \n Michele Sebag (CNRS)
    \n Mingming Gong (University of Melbourne)
    \n Nathan Kallus (Cornell University)
    \n Negar Kiyavash (EPFL)
    \n Peter Spirtes (Carnegie Mellon University)
    \n Shohei Shimizu (Shiga University)
    \n Tom Claassen (Radboud University Nijmegen)
    \n
    \n
    \n )\n}\n\nexport default AreaChair2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ProgramCommittee2022page = () => {\n return (\n
    \n Program Committee\n \n Adith Swaminathan (Microsoft)
    \n Alessio Moneta (Scuola Superiore Sant'Anna Pisa)
    \n Allison Koenecke (Microsoft)
    \n Amit Sharma (Microsoft Research)
    \n AmirEmad Ghassami (Johns Hopkins University)
    \n Andrew Ying (University of Pennsylvania)
    \n Angela Zhou (University of California Berkeley)
    \n Anne Helby Petersen (University of Copenhagen)
    \n Arjen Hommersom (Open University of the Netherlands)
    \n Badr Youbi Idrissi (Ecole Normale Superieure)
    \n Bo Han (HKBU)
    \n Bryon Aragam (University of Chicago)
    \n Bryan Andrews (Carnegie Mellon University)
    \n Caleb H Miles (Columbia University)
    \n Carlos Cinelli (University of Washington)
    \n Christopher John Quinn (Iowa State University)
    \n Clark Glymour (Carnegie Mellon University)
    \n Chunchen Liu (Alibaba Group)
    \n Conor Mayo-Wilson (University of Washington)
    \n Daniel Chicharro (City University of Hong Kong)
    \n David Danks (University of California, San Diego)
    \n David Jensen (University of Massachusetts, Amherst)
    \n Dinghuai Zhang (Montreal Institute for Learning Algorithms)
    \n Divyansh Kaushik (Carnegie Mellon University)
    \n Erich Kummerfeld (University of Minnesota)
    \n Feng Xie (Peking University)
    \n Frederick Eberhardt (California Institute of Technology)
    \n Fredrik Daniel Johansson (Chalmers University of Technology)
    \n Gregory F Cooper (University of Pittsburgh)
    \n Ignavier Ng (Carnegie Mellon University)
    \n Ioan Gabriel Bucur (Radboud University Nijmegen)
    \n Jacob Dorn (Princeton University)
    \n Jakob Runge (TU Berlin)
    \n Jalal Etesami (Swiss Federal Institute of Technology Lausanne)
    \n Jaron J.R. Lee (Johns Hopkins University)
    \n Jinzhou Li (Swiss Federal Institute of Technology)
    \n Juan D. Correa (Universidad Autónoma de Manizales)
    \n Johannes Textor (Radboud University)
    \n Jose Peña (Linköping University)
    \n Junzhe Zhang (Columbia University)
    \n Kartik Ahuja (Montreal Institute for Learning Algorithms)
    \n Karthika Mohan (Oregon State University)
    \n Kayvan Sadeghi (University College London)
    \n Kelly Wang Zhang (Harvard University)
    \n Konstantin Genin (University of Tuebingen)
    \n Kun Kuang (Zhejiang University)
    \n Linbo Wang (University of Toronto)
    \n Lin Liu (Shanghai Jiao Tong University)
    \n Marco Zaffalon (IDSIA)
    \n Martin Arjovsky (New York University)
    \n Markus Kalisch (Swiss Federal Institute of Technology)
    \n Maxime Peyrard (Swiss Federal Institute of Technology Lausanne)
    \n Maxime Gasse (MILA research institute)
    \n Menghan Wang (eBay)
    \n Melih Barsbey (Bogazici University)
    \n Mingzhang Yin (Columbia University)
    \n Marc-André Legault (McGill University)
    \n Murat Kocaoglu (Purdue University)
    \n Nicolas Usunier (Facebook)
    \n Nikolaj Thams (University of Copenhagen)
    \n Noam Finkelstein (Johns Hopkins University)
    \n Numair Sani (Johns Hopkins University)
    \n Olivier Goudet (University of Angers)
    \n Paul Rolland (Swiss Federal Institute of Technology Lausanne)
    \n Peng Cui (Tsinghua University)
    \n Petar Stojanov (MIT)
    \n Phillip Lippe (University of Amsterdam)
    \n Philippe Brouillard (University of Montreal)
    \n Ranjani Srinivasan (Johns Hopkins University)
    \n Reda ALAMI (Paris Saclay University)
    \n Richard Guo (University of Washington)
    \n Ruibo Tu (KTH)
    \n Ruocheng Guo (City University of Hong Kong)
    \n Ruben Sanchez Romero (Rutgers University)
    \n Rohit Bhattacharya (Williams College)
    \n Michele Santacatterina (NYU Langone Health)
    \n Sara Magliacane (University of Amsterdam)
    \n Sanghack Lee (Seoul National University)
    \n Sebastien Lachapelle (University of Montreal)
    \n Shengyu Zhu (Huawei Noah's Ark Lab)
    \n Shuyu Dong (Inria)
    \n Sindy Löwe (University of Amsterdam)
    \n Sisi Ma (University of Minnesota - Twin Cities)
    \n Takashi Nicholas Maeda (RIKEN)
    \n Tongliang Liu (University of Sydney)
    \n Thomas Icard (Stanford University)
    \n Weijia Zhang (Southeast University)
    \n Wei Huang (University of Melbourne)
    \n Weiran Yao (Carnegie Mellon University)
    \n Wolfgang Wiedermann (University of Missouri-Columbia)
    \n Xiaojie Mao (Tsinghua University)
    \n Yi Su (University of California Berkeley)
    \n Yixin Wang (University of Michigan)
    \n Yan Zeng (Tsinghua University)
    \n Yue Liu (Renmin University of China)
    \n Yuhao Wang (Tsinghua University)
    \n Y. Samuel Wang (Cornell University)
    \n Zhuangyan Fang (Peking University)
    \n
    \n
    \n )\n}\n\nexport default ProgramCommittee2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport DatesTable from \"../ImportantDatesPage/components/DateTable/DatesTable\";\nimport { importantDates2022 } from \"../../../common/constants/importantDates\";\nimport \"./CLeaR2022Page.scss\";\n\nconst CLeaR2022page = () => {\n return (\n
    \n CLeaR (Causal Learning and Reasoning) 2022\n
    \n

    Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to facilitate formulating, understanding, and tackling a number of hard machine learning problems in transfer learning, reinforcement learning, and deep learning.

    \n

    \n We invite submissions to the 1st conference on Causal Learning and\n Reasoning (CLeaR), and welcome paper submissions that describe new\n theory, methodology, and/or applications relevant to any aspect of\n causal learning and reasoning in the fields of artificial intelligence\n and statistics. Submitted papers will be evaluated based on their\n novelty, technical quality, and potential impact. Experimental methods\n and results are expected to be reproducible, and authors are strongly\n encouraged to make code and data available. We also encourage\n submissions of proof-of-concept research that puts forward novel ideas\n and demonstrates potential for addressing problems at the intersection\n of causality and machine learning. CLeaR 2022 will be held in Eureka, CA, USA from April 11 to 13, 2022, with virtual elements. \n

    \n Topics of submission may include, but are not limited to:\n
    • Machine learning building on causal principles
    • Causal discovery in complex environments
    • Efficient causal discovery in large-scale datasets
    • Causal effect identification and estimation
    • Causal generative models for machine learning
    • Unsupervised and semi-supervised deep learning connected to causality
    • Machine learning with heterogeneous data sources
    • Benchmarks for causal discovery and causal reasoning
    • Reinforcement learning
    • Fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • Applications of any of the above to real-world problems

    Submit at https://openreview.net/group?id=cclear.cc/CLeaR/2022/Conference.

    \n
    \n

    \n
    \n
    Updates
    • 12/03/2022: CLeaR 2022 conference registration is open. Please register here (registration closed).
    • 07/03/2022: CLeaR 2022 conference schedule is available.
    • 10/02/2022: camera-ready instructions can be found at Camera-Ready Instructions.
    • 13/01/2022: accepted papers can be found at Accepted Paper List.
    \n

    \n
    \n
    Important Dates
    \n \n
    \n
    \n );\n};\n\nexport default CLeaR2022page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst FullAgenda2022page = () => {\n return (\n
    \n Full Agenda\n

    CLeaR 2022 is co-located with a CIFAR/Sloan invitation-based causality workshop. This workshop is organized by CIFAR in conjunction with the Learning in Machines and Brains program directed by Yoshua Bengio and Yann LeCun.

    \n
    Day 1 (April 11, Monday)
    9:30-10:30 Panel 1: Causal theory & methodology
    11:00-11:45 Plenary talk
    11:45-12:30 Plenary talk
    12:30-14:00 Lunch
    14:00-15:00 CLeaR talks: Causal discovery and inference (17 minutes + 3-minute Q&A for each talk)
    15:30-15:45 CLeaR spotlights: Poster session 1 (in person; 1.5 minutes for each spotlight)
    15:45-16:00 CLeaR spotlights: Poster session 2 (remote; 1.5 minutes for each spotlight)
    16:00-16:45 CLeaR poster session 1 (in person)
    16:45-17:30 CLeaR poster session 2 (online via Zoom breakout rooms)
    \n
    \n
    Day 2 (April 12, Tuesday)
    9:30-10:30 Panel 2: Representation learning
    11:00-11:45 Plenary talk
    12:00-14:00 Lunch
    14:00-15:00 CLeaR talks: Causality and other learning problems (17 minutes + 3-minute Q&A for each talk)
    15:00-20:30 Trip to redwoods; dinner there
    \n
    \n
    Day 3 (April 13, Wednesday)
    9:30-10:30 Panel 3: Systems & applications
    11:00-11:45 Plenary talk
    12:00-14:00 Lunch
    14:00-15:00 CLeaR talks: Causality and ethical AI (17 minutes + 3-minute Q&A for each talk)
    15:30-15:45 CLeaR spotlights: Poster session 3 (in person; 1.5 minutes for each spotlight)
    15:45-16:00 CLeaR spotlights: Poster session 4 (remote; 1.5 minutes for each spotlight)
    16:00-17:30 CLeaR poster session 3 (in person); CLeaR poster session 4 (online via Zoom breakout rooms)
    \n
    \n
    \n )\n}\n\nexport default FullAgenda2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst KeynoteSpeaker2022page = () => {\n return (\n
    \n Plenary Speakers\n \n\n - Alison Gopnik, University of California, Berkeley\n\nTitle: Causal Learning in Children and Computers.\n\n

    Abstract: I will describe our research showing how even very young human children engage in effective causal inference - discovering new causal relationships through observation and intervention. This includes not only inferring specific causal relationships but also the discovery of abstract causal “over-hypotheses”, variable discovery, analogical reasoning, and active learning through exploration. I will discuss implications for causal learning in AI systems.

    \n\n\n- Judea Pearl, University of California, Los Angeles \n\nTitle: The Science of Cause and Effect: From Deep Learning to Deep Understanding\n\n

    Abstract: We will define "deep understanding" as the capacity to answer questions at all three levels of the reasoning hierarchy: predictions, actions, and imagination. Accordingly, I will describe a language, calculus, and algorithms that facilitate all three modes of reasoning, using a data + model symbiosis. The talk will then summarize several reasoning tasks that have benefitted from this calculus, including attribution, mediation, data fusion, and missing data. I will conclude with future horizons, which include automated scientific explorations, personalized decision making, and social intelligence.

    \n\n\n- James Robins, Harvard School of Public Health \nTitle: The Bold Vision of Artificial Intelligence and Philosophy: Directed Acyclic Graphs and Causal Discovery from Non-Independence Constraints\n

    Abstract: My talk is a Parable of Statistics and Big Data wherein our hero pulls a rabbit out of the hat without first putting a rabbit into the hat and thereby wins the Nobel Prize in Biology.

    \n\n\n- Peter Spirtes, Carnegie Mellon University \n\nTitle: Inferring Causal Relations from Sample Data\n\n

    Abstract: I will present a broad overview of the assumptions, types of algorithms, and the limitations of algorithms for inferring causal structures from sample data, along with some illustrations of actual applications. I will explain how the different assumptions and kinds of data (e.g. no latent confounders, parametric constraints, and i.i.d. data) affect inference, and some recent advances that have been made in relaxing the assumptions that have been commonly made in the past. I will briefly relate causal inference to some problems in Artificial Intelligence.

    \n
    \n
    \n )\n}\n\nexport default KeynoteSpeaker2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CallforPapers2022page = () => {\n return (\n
    \n Call for Papers\n\n

    We invite submissions to the 1st Conference on Causal Learning and Reasoning (CLeaR), and welcome paper submissions that describe new theory, methodology, and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Accepted papers will be published in the Proceedings of Machine Learning Research (PMLR). \n

    \n\t
    Key dates
    \n\tThe planned dates are as follows:\n\t\n\t

    Submit at https://openreview.net/group?id=cclear.cc/CLeaR/2022/Conference.

    \n\t
    Summary
    \n\t

    Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to facilitate formulating, understanding, and tackling a broad range of problems, including domain generalization, robustness, trustworthiness, and fairness across machine learning, reinforcement learning, and statistics.

    \n\n\t

    We invite papers that describe new theory, methodology and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Submitted papers will be evaluated based on their novelty, technical quality, and potential impact. Experimental methods and results are expected to be reproducible, and authors are strongly encouraged to make code and data available. We also encourage submissions of proof-of-concept research that puts forward novel ideas and demonstrates potential for addressing problems at the intersection of causality and machine learning.

    \n\t\n\t
    Paper Submission
    \n\t

    The proceedings track is the standard CLeaR paper submission track. Papers will be selected via a rigorous double-blind peer-review process. All accepted papers will be presented at the Conference as contributed talks or as posters and will be published in the Proceedings.

    \n\t

    \n\tTopics of submission may include, but are not limited to:\n\t

    \n\t

    \n\n\t
    Physical Attendance
    \n\t

    The CLeaR 2022 organizing committee prioritizes the safety and health of our community. We are still considering the format of the CLeaR 2022 conference. It will preferably be held as a hybrid conference with no mandatory physical attendance, but we also keep a backup plan of making the conference fully virtual in case the pandemic situation changes. Once we reach a final decision, we will announce the format of the conference on the website. Thank you for your patience and understanding.

    \n\n\t
    Formatting and Supplementary Material
    \n\t

    Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open access archival version of the full paper reviewed for CLeaR. You can also submit a single file of additional supplementary material separately, which may be either a pdf file (containing proof details, for instance) or a zip file that can include multiple files of all formats (such as code or videos). Note that reviewers are under no obligation to examine the supplementary material.

    \n\n\t

    Please format the paper using the official LaTeX style files. We do not support submission in formats other than LaTeX. Please do not modify the layout given by the style file.

    \n\t\n\t

    Submissions will be through OpenReview (https://openreview.net/group?id=cclear.cc/CLeaR/2022/Conference) and will be open approximately 4-6 weeks before the abstract submission deadline.

    \n\n\t
    Anonymization Requirements
    \n\t

    The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. We use OpenReview to host papers; however, public discussions are not allowed during the review process. The review comments are only visible to program chairs, area chairs, and reviewers with submitted review comments. Papers will be desk-rejected if they contain any information that violates the double-blind reviewing policy, such as author names or affiliations, acknowledgements, or links that could reveal an author’s identity or institution. Self-citations are allowed as long as anonymity is preserved. It is up to the author’s discretion how best to preserve anonymity when including self-citations. Possibilities include: leaving out a self-citation, including it but replacing the citation text with “removed for anonymous submission,” or leaving the citation as-is. We recommend leaving in a moderate number of self-citations for published or otherwise well-known work.

    \n\n\t

    Revisions are allowed in the submission system until the paper submission deadline. Changes will not be allowed afterwards.

    \n\n\t

    We strongly discourage advertising the preprint on social media or in the press while under submission to CLeaR. Preprints must not be explicitly identified as a CLeaR submission at any time during the review period (i.e., from the abstract submission deadline until the notification of the accept/reject decision).

    \n\n\t
    Dual Submissions
    \n\t

    CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in or submitted to a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions to workshops or other non-archival venues (without proceedings) will not be considered dual submissions. Submissions as extended abstracts with 5 pages or less will not be considered concurrent submissions either. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. Tech reports (including reports on sites such as arXiv) do not count as prior publication. It is acceptable to have a substantially extended version of the submitted paper under consideration simultaneously for journal publication, so long as the journal version’s planned publication date is after our publication date (tentatively April 13, 2022), it does not violate the journal’s policy, the journal submission does not interfere with CLeaR’s right to publish the paper, and the situation is clearly described at the time of CLeaR submission. Please describe the situation in the appropriate box on the submission page (and do not include author information in the submission itself, to avoid accidental unblinding). Authors are also allowed to give talks to restricted audiences on the work(s) submitted to CLeaR during the review period.

    \n\n\t

    All accepted papers will be presented at the Conference either as contributed talks or as posters, and will be published in the CLeaR Conference Proceedings in the Journal of Machine Learning Research Workshop and Conference Proceedings series. Papers for talks and posters will be treated equally in publication.

    \n\n\t
    Confidentiality
    \n\t

    The reviewers and area chairs will have access to papers and supplementary materials that are assigned to them.

    \n\t\n\t

    The program chairs and workflow chairs will have access to all the papers. Everyone having access to papers and supplementary materials will be instructed to keep them confidential during the review process and delete them after the final decisions.

    \n\t\n\t

    Reviews will be visible to area chairs, program chairs, and workflow chairs throughout the process. At any stage of the process, author names will not be known to the reviewers or area chairs, but only visible to program chairs. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers.

    \n\n\tBernhard Schölkopf, Caroline Uhler & Kun Zhang
    \n\tCLeaR 2022 General and Program Chairs\n\t

    \n
    \n )\n}\n\nexport default CallforPapers2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ACInstruction2022page = () => {\n return (\n
    \n AC Instructions\n

    Thank you for serving as an area chair for CLeaR 2022. As an area chair, your job is to (1) ensure that all the submissions you are assigned have high-quality reviews and good discussions, and (2) write quality meta-reviews and make acceptance decisions. If you have any questions, please contact the program chairs at chairs.clear2022@gmail.com. The main tasks of area chairs are listed below.

    \n
    Main tasks
    \n
    1. Preparation (by Oct 22, 2021)
      • CLeaR 2022 is using the OpenReview system. Please create your OpenReview profile if you do not have one, and make sure it is up to date if you already have an account.
      • Please read and agree to the CLeaR 2022 code of conduct and declare any conflicts of interest.
      • In addition to the guidelines below, please be familiar with the reviewer instructions. You will be interacting significantly with reviewers, so please make sure you understand what is expected of them.
    2. Bid on papers (Oct 25, 2021 - Oct 30, 2021)
      • Log into OpenReview and bid on submissions that fall into your area of expertise. Your bidding is an important input to the overall matching results.
    3. Check reviewer assignment (Oct 31, 2021 - Nov 2, 2021)
      • Make sure that every submission in your batch is matched with suitable reviewers before the reviewing process starts.
      • If you notice a conflict of interest with a submission that is assigned to you, please contact the program chairs immediately so that the paper can be reassigned.
      • You can invite extra reviewers, either from the existing pool or by inviting external reviewers, even after the reviewing process starts.
    4. Make sure all papers have quality reviews (Nov 22, 2021 - Nov 26, 2021)
      • Initial reviews are due Monday, Nov 22. You might need to send multiple reminder emails. If a reviewer is unable to deliver a review, please find a replacement reviewer who is able to do emergency reviews.
      • Read all reviews carefully. If a review is substandard, you should ask the reviewer to improve their review.
    5. Discuss with reviewers and authors (Dec 4, 2021 - Dec 28, 2021)
      • As soon as the discussion period starts, initiate and lead a discussion via OpenReview for each submission, and make sure the reviewers engage in the discussion phase.
      • Make sure your reviewers read and respond to all author responses.
      • A further discussion with the authors will be enabled during the discussion period. When posting a comment, please be careful about who can see it: only the reviewers, or both reviewers and authors.
    6. Make accept/reject decisions (Jan 1, 2022 - Jan 8, 2022)
      • Write a meta-review that explains your decision (accept or reject) to the authors. Your comments should augment the reviews, and explain how the reviews, author response, and discussion were used to arrive at your decision. Do not dismiss or ignore a review unless you have a good reason for doing so. If the reviewers cannot come to a consensus, you should read the paper carefully and write a detailed meta-review.
      • If you cannot make a decision, please reach out to the program chairs at your earliest convenience.
    \n \n

    * Please review the policies in the CLeaR 2022 Call for Papers and Reviewer Instructions.

    \n
    \n )\n}\n\nexport default ACInstruction2022page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CodeConduct2022page = () => {\n return (\n
    \n Code of Conduct\n
    Purpose
    \n

    At CLeaR 2022, our goal is to create a community and environment that recognizes and respects the intrinsic value of everyone, which is essential for the open exchange of ideas, freedom of thought and expression, and respectful scientific debate at the conference.

    \n
    Who
    \n

    All participants, including but not limited to organizers, reviewers, speakers, sponsors, and volunteers at our Conference and Conference-sponsored social events, are required to agree to this Code of Conduct both during an event and on official communication channels, including social media. In particular, sponsors should not use sexual, racial, or otherwise offensive images, events, or other materials. This code applies both to official sponsors and to any organization that uses the conference name as a brand as part of its activities during or around the conference.

    \n
    Policy
    \n

    CLeaR is committed to providing all participants with an experience free from harassment, bullying, discrimination, and retaliation. This includes offensive comments related to age, gender, gender identity and expression, race, sexual orientation, physical or intellectual disability, physical appearance, body type, ethnicity, religion, politics, technical choices, or any other personal characteristics. There is no tolerance for bullying, intimidation, personal assault, harassment, continuous interruption of conversations or other activities, or behavior that interferes with the full participation of other participants. This includes sexual harassment, stalking, harassing photography or recording, inappropriate physical contact, unwelcome sexual attention, vulgar communication, and diminutive characterizations. The policies apply to actual meeting sites and conference venues, both physical and online, and to official virtual platforms, including but not limited to OpenReview comments, video, virtual streaming, and Q&A tools. For example, offensive or even threatening comments in OpenReview are prohibited. Also, Zoom bombing or any virtual activities that have nothing to do with the topic of discussion, and that are detrimental to the purpose of the topic or program, are not allowed.

    \n
    Action
    \n

    Participants who are asked by any member of the community to stop any improper behavior defined here should comply immediately. Meeting organizers may take further actions at their discretion, including: formally or informally warning offenders, expelling them from the meeting without refund, barring them from submitting to or participating in future CLeaR meetings, reporting the incident to the offender’s local agency or funding agency, or reporting the incident to local authorities or law enforcement agencies. The "just kidding" response is unacceptable. If action is taken, an appeal procedure will be provided.

    \n
    Complaint reporting
    \n

    If you have any concerns about possible violations of the policies, please contact the conference chairs (chairs.clear2022@gmail.com) as soon as possible. Reports made during the conference will be responded to within 24 hours; those made at other times, in less than two weeks. Complaints and violations will be handled with discretion. We are ready and eager to help attendees contact relevant help services, escort them to a safe place, or otherwise help those who have been harassed feel safe during the meeting. We gratefully accept feedback from the CLeaR community on our policy and actions.

    \n

    A similar version has been used by other conferences, such as ICLR 2020 and UAI 2020.

    \n
    \n\n );\n};\n\nexport default CodeConduct2022page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CodeConduct2022page = () => {\n return (\n
    \n Conflicts of Interest\n

    This page defines what constitutes a conflict of interest for the CLeaR 2022 review process.

    \n

    If you are an author, reviewer, or area chair, please make sure to create or update your OpenReview profile. You will be asked to declare two types of conflicts: domain conflicts and personal conflicts. Both types are declared by filling out the appropriate sections of your OpenReview profile, as described below.

    \n \n
    \n );\n};\n\nexport default CodeConduct2022page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CameraReady2022page = () => {\n return (\n
    \n Camera-ready Instructions\n

    The deadline for submitting your camera-ready version is Feb. 20, 2022. The camera-ready version of your paper may have one extra page, in order to address the review comments. That is, it may have 13 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Appendices should be included in the same pdf after the references section. Please use \documentclass[final,12pt]{clear2022} when preparing the camera-ready version. Please don’t include any packages like “geometry” or “fullpage” to change the margins. For consistency across the proceedings, ensure your paper is formatted for letter paper rather than A4 (you can check with pdfinfo).
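    As a minimal illustration of the instructions above, a camera-ready source could start as in the sketch below; it assumes the clear2022 class file from the official style package sits in the working directory, and the body is a placeholder:

    % Minimal camera-ready skeleton (illustrative, not an official template).
    % Assumes clear2022.cls from the official style files is present.
    \documentclass[final,12pt]{clear2022}
    % Do not load geometry or fullpage; the class fixes the margins.
    \begin{document}
    % ... title, authors, up to 13 pages of content, references, appendices ...
    \end{document}

    On letter paper, pdfinfo should report a page size of 612 x 792 pts; an A4 document would show 595 x 842 pts instead.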

    \n \n
    \n );\n};\n\nexport default CameraReady2022page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './RegistrationPage.scss';\n\nconst Registration2022page = () => {\n return (\n
    \n Venue and Registration\n

    CLeaR 2022 will be held at the Sequoia Conference Center (SCC), Eureka, CA, USA on April 11-13, 2022. We encourage in-person participation, but the conference also has virtual elements. \n We are happy to announce that CLeaR will be co-located with a CIFAR/Sloan invitation-based causality workshop. This workshop is organized by CIFAR in conjunction with the Learning in Machines and Brains program directed by Yoshua Bengio and Yann LeCun. CLeaR participants are welcome to attend all sessions of the workshop as well.

    \n\n

    Please register for CLeaR 2022 here (registration closed). Please let us know if you need letters for your visa application.

    \n\n

    We would like to mention that a number of authors and a majority of the organizing committee (including Bernhard Schölkopf, Caroline Uhler, Kun Zhang, Francesco Locatello, Frederick Eberhardt, Daniel Malinsky, Sara Magliacane, Chandler Squires, Patrick Burauel, Elizabeth Ogburn, and Julius von Kügelgen) have expressed their intention to attend in person. Below is the nearby hotel information:

    Name | Distance to SCC | Price (per room per night) | Number of rooms (estimate)
    Best Western Plus, Humboldt Bay Inn | 1.5 miles | 140 | 10+
    Hotel Arcata | 7.3 miles | 220+ | 20
    Holiday Inn Express & Suites | 2.1 miles | 150+ | 10+
    Clarion Hotel by Humboldt Bay | 0.4 miles | 112+ | 35+
    The Inn at 2nd & C | 1.4 miles | 149+ | 15+
    Carter House Inns | 0.8 miles | 225 | 15+
    \n
    \n
    \n )\n}\n\nexport default Registration2022page\n","import React from \"react\";\nimport \"./DatesTable.scss\";\n\nconst DatesTable = ({array}) => {\n return (\n \n \n \n \n \n \n \n \n {array.map(({ name, date }) => (\n \n \n \n \n ))}\n \n
    NameDate
    {name}{date}
    \n );\n};\n\nexport default DatesTable;\n","import React from 'react'\nimport DatesTable from './components/DateTable/DatesTable'\nimport Title from '../../../components/Title/Title'\nimport { importantDates2023 } from '../../../common/constants/importantDates'\n\nconst ImportantDates2023page = () => {\n return (\n <>\n Important Dates\n \n \n )\n}\n\nexport default ImportantDates2023page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
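    // Editor's note (hypothetical values): DatesTable above renders an array of
    // { name, date } objects, so a matching entry in
    // common/constants/importantDates.js would look like:
    //
    //   export const importantDates2023 = [
    //     { name: "Paper submission deadline", date: "TBD" },
    //     { name: "Author notification", date: "TBD" },
    //   ];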
    \n {array.map(({title, author, filePath, description}) => (\n \n \n

    {title}

    \n {author}

    \n Download PDF\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","const acceptedDatasets = [\n {\n title: \"The Structurally Complex with Additive Parent Causality (SCARY) Dataset\",\n author:\n \"Jarry Chen, Haytham M. Fayek\",\n filePath: \"/2023/AcceptedDatasets/chen23a.pdf\",\n },\n {\n title: \"3DIdentBox: A Toolbox for Identifiability Benchmarking\",\n author:\n \"Alice Bizeul, Imant Daunhawer, Emanuele Palumbo, Bernhard Schölkopf, Alexander Marx, Julia E. Vogt\",\n filePath: \"/2023/AcceptedDatasets/bizeul23a.pdf\",\n },\n {\n title: \"Causal Benchmark Based on Disentangled Image Dataset\",\n author:\n \"Liyuan Xu, Arthur Gretton\",\n filePath: \"/2023/AcceptedDatasets/xu23a.pdf\",\n },\n {\n title: \"Synthetic Time Series: A Dataset for Causal Discovery\",\n author:\n \"Julio Munoz-Benıtez, L. Enrique Sucar\",\n filePath: \"/2023/AcceptedDatasets/munozbenitez23a.pdf\",\n },\n {\n title: \"SpaCE: The Spatial Confounding (Benchmarking) Environment\",\n author:\n \"Mauricio Tec, Ana Trisovic, Michelle Audirac, Francesca Dominici\",\n filePath: \"/2023/AcceptedDatasets/tec23a.pdf\",\n },\n {\n title: \"CausalEdu: a real-world education dataset for temporal causal discovery and inference\",\n author:\n \"Wenbo Gong, Digory Smith, Zichao Wang, Craig Barton, Simon Woodhead, Nick Pawlowski, Joel Jennings, Cheng Zhang\",\n filePath: \"/2023/AcceptedDatasets/gong23a.pdf\",\n },\n];\n\nexport default acceptedDatasets;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedDatasets from \"../../../common/constants/acceptedDatasets_2023\";\n\nconst AcceptedDatasets2023page = () => {\n return (\n //
    \n // Accepted Papers\n // TBD\n //
    \n
    \n Accepted Datasets\n \n
    \n\n );\n};\n\nexport default AcceptedDatasets2023page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
    \n {array.map(({title, author ,description}) => (\n \n \n

    {title}

    \n {author}\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","const acceptedPapers = [\n {\n title: \"Generalizing Clinical Trials with Convex Hulls\",\n author:\n \"Eric Strobl, Thomas A Lasko\",\n },\n {\n title: \"Backtracking Counterfactuals\",\n author:\n \"Julius Von Kügelgen, Abdirisak Mohamed, Sander Beckers\",\n },\n {\n title: \"Stochastic Causal Programming for Bounding Treatment Effects\",\n author:\n \"Kirtan Padh, Jakob Zeitler, David Watson, Matt Kusner, Ricardo Silva, Niki Kilbertus\",\n },\n {\n title: \"Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model\",\n author:\n \"Mario A. T. Figueiredo, Catarina Oliveira\",\n },\n {\n title: \"Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions\",\n author:\n \"Fabio Massimo Zennaro, Máté Drávucz, Geanina Apachitei, W. Dhammika Widanage, Theodoros Damoulas\",\n },\n {\n title: \"Causal Abstraction with Soft Interventions\",\n author:\n \"Riccardo Massidda, Atticus Geiger, Thomas Icard, Davide Bacciu\",\n },\n {\n title: \"Directed Graphical Models and Causal Discovery for Zero-Inflated Data\",\n author:\n \"Shiqing Yu, Mathias Drton, Ali Shojaie\",\n }, \n {\n title: \"An Algorithm and Complexity Results for Causal Unit Selection\",\n author:\n \"Haiying Huang, Adnan Darwiche\",\n },\n];\n\nexport default acceptedPapers;\n","const acceptedPapers = [\n {\n title: \"Sample-Specific Root Causal Inference with Latent Variables\",\n author: \"Eric Strobl, Thomas A Lasko\",\n },\n {\n title: \"Causal Discovery for Non-stationary Non-linear Time Series Data Using Just-In-Time Modeling\",\n author: \"Daigo Fujiwara, Kazuki Koyama, Keisuke Kiritoshi, Tomomi Okawachi, Tomonori Izumitani, Shohei Shimizu\",\n },\n {\n title: \"Causal Models with Constraints\",\n author: \"Sander Beckers, Joseph Halpern, Christopher Hitchcock\",\n },\n {\n title: \"Non-parametric identifiability and sensitivity analysis of synthetic control models\",\n author: \"Jakob Zeitler, Athanasios Vlontzos, Ciarán Mark Gilligan-Lee\",\n },\n {\n title: \"Factorization of the Partial Covariance in Singly-Connected Path Diagrams\",\n author: \"Jose Peña\",\n },\n {\n title: \"Estimating long-term causal effects from short-term experiments and long-term observational data with unobserved confounding\",\n author: \"Graham Van Goffrier, Lucas Maystre, Ciarán Mark Gilligan-Lee\",\n },\n {\n title: \"Local Dependence Graphs for Discrete Time Processes\",\n author: \"Wojciech Niemiro, Łukasz Rajkowski\",\n },\n {\n title: \"Scalable Causal Discovery with Score Matching\",\n author: \"Francesco Montagna, Nicoletta Noceti, Lorenzo Rosasco, Kun Zhang, Francesco Locatello\",\n },\n {\n title: \"Causal Discovery with Score Matching on Additive Models with Arbitrary Noise\",\n author: \"Francesco Montagna, Nicoletta Noceti, Lorenzo Rosasco, Kun Zhang, Francesco Locatello\",\n },\n {\n title: \"Beyond the Markov Equivalence Class: Extending Causal Discovery under Latent Confounding\",\n author: \"Mirthe Maria Van Diepen, Ioan Gabriel Bucur, Tom Heskes, Tom Claassen\",\n },\n {\n title: \"Learning Conditional Granger Causal Temporal Networks\",\n author: \"Ananth Balashankar, Srikanth Jagabathula, Lakshmi Subramanian\",\n },\n {\n title: \"Learning Causal Representations of Single Cells via Sparse Mechanism Shift Modeling\",\n author: \"Romain Lopez, Natasa Tagasovska, Stephen Ra, Kyunghyun Cho, Jonathan Pritchard, Aviv Regev\",\n },\n {\n title: \"Branch-Price-and-Cut for Causal Discovery\",\n author: \"James Cussens\",\n },\n {\n 
title: \"Instrumental Processes Using Integrated Covariances\",\n author: \"Søren Wengel Mogensen\",\n },\n {\n title: \"A Meta-Reinforcement Learning Algorithm for Causal Discovery\",\n author: \"Andreas W.M. Sauter, Erman Acar, Vincent Francois-Lavet\",\n },\n {\n title: \"Causal Inference Despite Limited Global Confounding via Mixture Models\",\n author: \"Spencer L. Gordon, Bijan Mazaheri, Yuval Rabani, Leonard Schulman\",\n },\n {\n title: \"Causal Triplet: An Open Challenge for Intervention-centric Causal Representation Learning\",\n author: \"Yuejiang Liu, Alexandre Alahi, Chris Russell, Max Horn, Dominik Zietlow, Bernhard Schölkopf, Francesco Locatello\",\n },\n {\n title: \"Image-based Treatment Effect Heterogeneity\",\n author: \"Connor Thomas Jerzak, Fredrik Daniel Johansson, Adel Daoud\",\n },\n {\n title: \"Causal Learning through Deliberate Undersampling\",\n author: \"Kseniya Solovyeva, David Danks, Mohammadsajad Abavisani, Sergey Plis\",\n },\n {\n title: \"Influence-Aware Attention for Multivariate Temporal Point Processes\",\n author: \"Xiao Shou, Tian Gao, Dharmashankar Subramanian, Debarun Bhattacharjya, Kristin Bennett\",\n },\n {\n title: \"Evaluating Temporal Observation-Based Causal Discovery Techniques Applied to Road Driver Behaviour\",\n author: \"Rhys Peter Matthew Howard, Lars Kunze\",\n },\n {\n title: \"On Discovery of Local Independence over Continuous Variables via Neural Contextual Decomposition\",\n author: \"Inwoo Hwang, Yunhyeok Kwak, Yeon-Ji Song, Byoung-Tak Zhang, Sanghack Lee\",\n },\n {\n title: \"Local Causal Discovery for Estimating Causal Effects\",\n author: \"Shantanu Gupta, David Childers, Zachary Chase Lipton\",\n },\n {\n title: \"Can Active Sampling Reduce Causal Confusion in Offline Reinforcement Learning?\",\n author: \"Gunshi Gupta, Tim G. J. 
Rudner, Rowan Thomas McAllister, Adrien Gaidon, Yarin Gal\",\n },\n {\n title: \"Causal Inference with Non-IID Data under Model Uncertainty\",\n author: \"Chi Zhang, Karthika Mohan, Judea Pearl\",\n },\n {\n title: \"Factual Observation Based Heterogeneity Learning for Counterfactual Prediction\",\n author: \"Hao Zou, Haotian Wang, Renzhe Xu, Bo Li, Jian Pei, Ye Jun Jian, Peng Cui\",\n },\n {\n title: \"On the Interventional Kullback-Leibler Divergence\",\n author: \"Jonas Bernhard Wildberger, Siyuan Guo, Arnab Bhattacharyya, Bernhard Schölkopf\",\n },\n {\n title: \"Unsupervised Object Learning via Common Fate\",\n author: \"Matthias Tangemann, Steffen Schneider, Julius Von Kügelgen, Francesco Locatello, Peter Vincent Gehler, Thomas Brox, Matthias Kuemmerer, Matthias Bethge, Bernhard Schölkopf\",\n },\n {\n title: \"Practical Algorithms for Orientations of Partially Directed Graphical Models\",\n author: \"Malte Luttermann, Marcel Wienöbst, Maciej Liskiewicz\",\n },\n {\n title: \"Enhancing Causal Discovery from Robot Sensor Data in Dynamic Scenarios\",\n author: \"Luca Castri, Sariah Mghames, Marc Hanheide, Nicola Bellotto\",\n },\n {\n title: \"Leveraging Causal Graphs for Blocking in Randomized Experiments\",\n author: \"Abhishek Kumar Umrawal\",\n },\n];\n\nexport default acceptedPapers;\n","const acceptedPapers = [\n {\n title: \"Backtracking Counterfactuals\",\n author:\n \"Julius Von Kügelgen, Abdirisak Mohamed, Sander Beckers\",\n },\n];\n\nexport default acceptedPapers;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedPapers_oral from \"../../../common/constants/acceptedPapers_oral2023\";\nimport acceptedPapers_poster from \"../../../common/constants/acceptedPapers_poster2023\";\nimport acceptedPapers_best from \"../../../common/constants/acceptedPapers_best2023\";\nconst AcceptedPapers2023page = () => {\n return (\n //
    \n // Accepted Papers\n // TBD\n //
    \n
    \n Accepted Papers\n
    Best Paper Award
    \n \n
    Oral
    \n \n
    Poster
    \n \n
    \n );\n};\n\nexport default AcceptedPapers2023page;\n","import React from \"react\";\nimport \"./OCTable.scss\";\n\nconst OCTable = ({ array }) => {\n return (\n \n {array.map(({ position, persons }) => (\n \n \n \n ))}\n
    \n

    {position}

    \n {persons.map(({ name, href, address }) => (\n
    \n \n {name}\n \n
    \n {address}\n
    \n ))}\n
    \n );\n};\n\nexport default OCTable;\n","import React from 'react'\nimport Title from '../../../components/Title/Title'\nimport OCTable from './components/OCTable/OCTable'\nimport { array2023 } from '../../../common/constants/organizingCommittee'\n\n\nconst OrganizingCommittee2023page = () => {\n return (\n
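    // Editor's note (hypothetical names): OCTable above expects rows of
    // { position, persons }, where each person is { name, href, address }:
    //
    //   export const array2023 = [
    //     {
    //       position: "Program Chairs",
    //       persons: [
    //         { name: "Jane Doe", href: "https://example.org/~jdoe", address: "Example University" },
    //       ],
    //     },
    //   ];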
    \n Organizing Committee\n \n
    \n )\n}\n\nexport default OrganizingCommittee2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ReviewerInstruction2023page = () => {\n return (\n
    \n Reviewer Instructions\n

    Thank you for agreeing to review for CLeaR 2023! Your assessment is vital to creating a high-quality program. This page provides the review guidelines that will help you to write reviews efficiently and effectively.

    \n
    Main tasks
    \n
    1. Preparation (by Oct 28, 2022)
      • CLeaR 2023 is using the OpenReview system. Please create your OpenReview profile if you do not have one, and make sure it is up to date if you already have an account.
      • Reviewer invitations will be sent via noreply@openreview.net. Please accept the reviewer invitation before the expiry date.
      • Please read and agree to the CLeaR 2023 code of conduct and declare any conflicts of interest.
    2. Paper bidding and assignment checking (Oct 31, 2022 - Nov 4, 2022)
      • Please bid on the papers that fall into your area of expertise. Your bidding is an important input to the overall matching results.
      • Please check the assigned papers right after the paper assignment. If you do not feel qualified to review a paper or find potential conflicts of interest, please communicate with your AC as soon as possible.
    3. Write thorough and timely reviews (Nov 10, 2022 - Nov 29, 2022)
      • Please make your review as deep and detailed as possible. Superficial reviews are not really helpful in making final decisions. It is also important to treat each submission fairly and provide unbiased reviews.
      • A review form has been designed to facilitate the review process. Please refer to the “Review form” section for step-by-step instructions on how to answer each question in the review form.
    4. Discuss with authors/fellow reviewers/ACs (Dec 12, 2022 - Dec 30, 2022)
      • Before the start of discussions, please carefully read author responses with an open mind to avoid possible misunderstandings. Even if the author's rebuttal does not change your opinion, please acknowledge that you have read and considered it.
      • A further discussion with the authors will be enabled during the discussion period. If you want the authors to clarify more things after reading the rebuttal, you can discuss with them on the paper’s page.
      • All reviewers should actively participate in discussions with fellow reviewers and ACs to have a more comprehensive understanding of each paper. The discussions are especially important for borderline papers and papers with high-variance assessments. While engaging in the discussion, please be professional, polite, and keep an open mind. Although full consensus makes the final decision easier, it is not mandatory in the reviewing process, as different people may have different perspectives.
      • If you change your opinion during or after the discussion phase, please update your ratings and give specific reasons in the final comments.
    \n
    Review form
    1. Summary. Summarize the main contributions of each paper. The contributions may be new problems, theories, methods, algorithms, applications, benchmarks, etc.
    2. Main review. Please provide an in-depth review of each paper by considering the following aspects:
      • Originality: Does the paper provide anything new, like a new problem or a new method? Is the novelty compared to existing works well justified? Is it possible that similar ideas have been studied but the paper does not cite them properly?
      • Significance: Does the paper address an important problem? How relevant are the results to the CLeaR community? Does the proposed theory or method significantly advance the state of the art? Do the results in the paper provide new insights into the research problem? Is this paper likely to have broad impact outside the CLeaR community, e.g., in natural/social science or engineering?
      • Technical quality: Is the proposed approach technically sound? Are claims substantiated by theoretical and/or empirical results? Are the derivations and proofs correct? Is the proposed method unnecessarily complicated? Are the hyperparameters tuned in an appropriate manner?
      • Clarity: Is the submission clearly written and well organized? Is the take-home message easily extractable from the paper? Is the motivation well explained by illustrations and examples? Are the technical details described rigorously? Are there so many typos that the paper becomes hard to read?
    3. Overall score. We use a 10-point scoring system for the overall assessment. Please select the category that best describes your assessment of the paper.
      • 10: Top 5% of accepted papers, seminal paper
      • 9: Top 15% of accepted papers, strong accept
      • 8: Top 50% of accepted papers, clear accept
      • 7: Good paper, accept
      • 6: Marginally above acceptance threshold
      • 5: Marginally below acceptance threshold
      • 4: OK but not good enough - rejection
      • 3: Clear rejection
      • 2: Strong rejection
      • 1: Trivial or wrong
    4. Confidence score. Please select the category that best describes your confidence in the assessment of the submission.
      • 5: You are absolutely certain about your assessment. You are very familiar with the related work and checked the math/other details carefully.
      • 4: You are confident in your assessment, but not absolutely certain. It is unlikely, but not impossible, that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work.
      • 3: You are fairly confident in your assessment. It is possible that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • 2: You are willing to defend your assessment, but it is quite likely that you did not understand central parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • 1: Your assessment is an educated guess. The submission is not in your area or the submission was difficult to understand. Math/other details were not carefully checked.
    \n
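    The site keeps this kind of tabular page data in small modules under common/constants/ (e.g., importantDates.js and the acceptedPapers files). As a minimal sketch, the two rating scales above could be captured the same way; the module name reviewForm.js and the field names score/label below are assumptions for illustration, not part of the current repo:

    // common/constants/reviewForm.js (hypothetical module; `score` and `label`
    // are illustrative field names, not an existing API in this repo)
    export const overallScores2023 = [
      { score: 10, label: "Top 5% of accepted papers, seminal paper" },
      { score: 9, label: "Top 15% of accepted papers, strong accept" },
      { score: 8, label: "Top 50% of accepted papers, clear accept" },
      { score: 7, label: "Good paper, accept" },
      { score: 6, label: "Marginally above acceptance threshold" },
      { score: 5, label: "Marginally below acceptance threshold" },
      { score: 4, label: "OK but not good enough - rejection" },
      { score: 3, label: "Clear rejection" },
      { score: 2, label: "Strong rejection" },
      { score: 1, label: "Trivial or wrong" },
    ];

    // Abbreviated labels; the full wording is in the list above.
    export const confidenceScores2023 = [
      { score: 5, label: "Absolutely certain; related work and math/details checked carefully" },
      { score: 4, label: "Confident, but not absolutely certain" },
      { score: 3, label: "Fairly confident; math/other details not carefully checked" },
      { score: 2, label: "Willing to defend, but may have missed central parts" },
      { score: 1, label: "Educated guess; submission not in your area" },
    ];

    A small table component in the style of the existing DatesTable could then map over these arrays, keeping the instructions page and any future year's scale in sync.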
    Policies
    \n

    Confidentiality. By reviewing for CLeaR 2023, you agree to keep all material and information related to the review confidential. In particular, you must not use ideas and results from submitted papers in your own research or distribute them to others. You should delete all reviewing material, such as submitted code, at the end of the reviewing cycle. You should not discuss submissions or any content related to their review with anyone without prior approval from the program chairs.

    \n

    Double-blind reviewing. The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. However, author names will be visible to area chairs and program chairs. Authors are responsible for anonymizing their submissions. Submissions may not contain any identifying information that may violate the double-blind reviewing policy. If you are assigned a submission that is not adequately anonymized, please contact the corresponding AC. Also, you should not attempt to find out the identities of authors for any of your assigned submissions, e.g., by searching arXiv preprints. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers. Please do not disclose your identity to authors and fellow reviewers in the discussions.

    \n

    Dual Submissions. CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in, or be submitted to, a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions as extended abstracts (5 pages or fewer), or to workshops or other non-archival venues (without proceedings), will not be considered concurrent submissions. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. If you suspect that a submission assigned to you is a dual submission, or if you require further clarification, please contact the corresponding AC. Please see the Call for Papers for more information about dual submissions.

    \n

    Violations of formatting instructions. Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open-access archival version of the full paper reviewed for CLeaR. If you are assigned a paper that is over length or appears to violate the CLeaR proceedings format (e.g., by decreasing margins or font size, removing pre-set spacing, etc.), please notify the corresponding AC immediately.

    \n \n

    * Please also review the policies in the CLeaR 2023 Call for Papers.

    \n
    \n )\n}\n\nexport default ReviewerInstruction2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport OCTable from '../OrganizingCommitteePage/components/OCTable/OCTable'\nimport {array ,array1} from '../../../common/constants/advisoryboard'\n\nconst AdvisoryBoard2023page = () => {\n return (\n
    \n Advisory Board\n \n \n \n \n \n
    \n
    \n )\n}\n\nexport default AdvisoryBoard2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './SponsorsPage.scss';\n\nconst Sponsors2023page = () => {\n return (\n
    \n Sponsors\n
    \n
    Gold Sponsors
    \n \n \n \n {/* */}\n \n \n
    \n
    Silver Sponsors
    \n \n \n {/* */}\n \n \n
    \n
    Bronze Sponsors
    \n \n \n \n \n
    \n
    \n
    \n )\n}\n\nexport default Sponsors2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst AreaChair2023page = () => {\n return (\n
    \n Area Chairs\n Erich Kummerfeld\t(Carnegie Mellon University)
    \n Jiji Zhang\t(The Chinese University of Hong Kong)
    \n Jin Tian\t(Iowa State University)
    \n Joris Mooij\t(University of Amsterdam)
    \n Kun Zhang\t(Carnegie Mellon University)
    \n Lin Liu\t(Shanghai Jiao Tong University)
    \n Mingming Gong\t(University of Melbourne)
    \n Razieh Nabi\t(Emory University)
    \n Rohit Bhattacharya\t(Williams College)
    \n Sach Mukherjee\t(University of Cambridge)
    \n Sara Magliacane\t(University of Amsterdam)
    \n Shohei Shimizu\t(Shiga University)
    \n Simon Lacoste-Julien\t(University of Montreal)
    \n Sofia Triantafillou\t(University of Crete)
    \n Thomas Richardson\t(University of Washington)
    \n Tom Claassen\t(Radboud University Nijmegen)
    \n Vasilis Syrgkanis\t(Microsoft)\n
    \n )\n}\n\nexport default AreaChair2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ProgramCommittee2023page = () => {\n return (\n
    \n Program Committee\n \n TBD\n
    \n )\n}\n\nexport default ProgramCommittee2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport DatesTable from \"../ImportantDatesPage/components/DateTable/DatesTable\";\nimport { importantDates2023 } from \"../../../common/constants/importantDates\";\nimport \"./CLeaR2023Page.scss\";\n\nconst CLeaR2023page = () => {\n return (\n
    \n CLeaR (Causal Learning and Reasoning) 2023\n
    \n

    Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to facilitate formulating, understanding, and tackling a number of hard machine learning problems in transfer learning, reinforcement learning, and deep learning.

    \n

    CLeaR 2022: Starting a brand new conference in these pandemic years and ensuring it is set up for long-term success has been a significant undertaking. Despite these challenges, more than 50 people attended the conference in person and several hundred connected remotely. We had 9 oral presentations and 40 posters, covering topics ranging from causal discovery, causal fairness, explainability, non-parametric inference, causal Markov decision processes, and social-influence estimation to applications of causality and other topics. We have received a number of enquiries about whether and where to hold CLeaR 2023 and are delighted to announce the next edition.

    \n

    \n We invite submissions to the 2nd conference on Causal Learning and\n Reasoning (CLeaR), and welcome paper submissions that describe new\n theory, methodology, and/or applications relevant to any aspect of\n causal learning and reasoning in the fields of artificial intelligence\n and statistics. Submitted papers will be evaluated based on their\n novelty, technical quality, and potential impact. Experimental methods\n and results are expected to be reproducible, and authors are strongly\n encouraged to make code and data available. We also encourage\n submissions of proof-of-concept research that puts forward novel ideas\n and demonstrates potential for addressing problems at the intersection\n of causality and machine learning. CLeaR 2023 will be held in Tübingen,\n Germany from April 11 to 14, 2023, with virtual elements. \n

    Topics of submission may include, but are not limited to:
    • Machine learning building on causal principles
    • Causal discovery in complex environments
    • Efficient causal discovery in large-scale datasets
    • Causal effect identification and estimation
    • Causal generative models for machine learning
    • Unsupervised and semi-supervised deep learning connected to causality
    • Machine learning with heterogeneous data sources
    • Benchmarks for causal discovery and causal reasoning
    • Reinforcement learning
    • Fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • Applications of any of the above to real-world problems
    • Foundational theories of causation

    Submit at https://openreview.net/group?id=cclear.cc/CLeaR/2023/Conference.

    \n
    \n

    \n
    \n
    Updates
    \n
      \n
    • 10/08/2023: The proceedings of CLeaR 2023 have been released. Please find the details here.
    • 05/05/2023: The table discussion results on Causality in the time of LLMs have been released. Please find the details here.
    • 26/04/2023: We are excited to announce that the Best Paper award has been presented to Backtracking Counterfactuals by Julius von Kügelgen, Abdirisak Mohamed, and Sander Beckers.
    • 11/04/2023: Details about the social activities are now online (see under Program).
    • The start and end of the conference: registration from noon to 1pm on Tuesday, Apr 11th; the conference ends at 1pm on Friday, Apr 14th.
    • 09/03/2023: Presentation instructions can be found at Presentation Instructions.
    • 02/03/2023: Accepted papers can be found at the Accepted Paper List.
    • 12/02/2023: CLeaR 2023 conference registration is open. Please register here.
    \n
    \n

    \n
    \n
    Important Dates
    \n \n
    \n
    \n );\n};\n\nexport default CLeaR2023page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst FullAgenda2023page = () => {\n return (\n
    \n Full Agenda\n

    The table discussion results on Causality in the time of LLMs have been released. Please find the details here.

    \n
    Day 1 (April 11, Tuesday)
    \n \n 12:00-1:00 PM Registration \n {/*
      \n
    • Panellists: Elizabeth Ogburn, Thomas Richardson, Eric Tchetgen Tchetgen, Caroline Uhler
    • \n
    */}\n \n 1:00-1:30 PM Welcome\n {/*
      \n
    • Judea Pearl (University of California, Los Angeles): The Science of Cause and Effect: From Deep Learning to Deep Understanding
    • \n
    */}\n \n 1:30-2:30 PM Keynote by Miguel Hernan\n
      \n
    • Causal AI for data analysis in the health sciences
    \n \n 2:30-4:00 PM Poster I\n
      \n
    • Poster I.1. Sample-Specific Root Causal Inference with Latent Variables
    • Poster I.2. Causal Discovery for Non-stationary Non-linear Time Series Data Using Just-In-Time Modeling
    • Poster I.3. Causal Models with Constraints
    • Poster I.4. Non-parametric identifiability and sensitivity analysis of synthetic control models
    • Poster I.5. Factorization of the Partial Covariance in Singly-Connected Path Diagrams
    • Poster I.6. Estimating long-term causal effects from short-term experiments and long-term observational data with unobserved confounding
    • Poster I.7. Learning Causal Representations of Single Cells via Sparse Mechanism Shift Modeling
    • Poster I.8. Scalable Causal Discovery with Score Matching
    • Poster I.9. Instrumental Processes Using Integrated Covariances
    \n 4:00-5:00 PM Oral I\n
      \n
    • Oral I.1. Generalizing Clinical Trials with Convex Hulls
    • Oral I.2. Backtracking Counterfactuals
    • Oral I.3. Stochastic Causal Programming for Bounding Treatment Effects
    \n Evening (onwards) Social Activity I\n {/* 14:00-15:00 CLeaR talks: Causal discovery and inference (17 minutes + 3-minute Q&A for each talk)
    \n
      \n
    • Differentially Private Estimation of Heterogeneous Causal Effects; by Fengshi Niu, Harsha Nori, Brian Quistorff, Rich Caruana, Donald Ngwe, Aadharsh Kannan
    • \n
    • Interactive rank testing by betting; by Boyan Duan, Aaditya Ramdas, Larry Wasserman
    • \n
    • Typing assumptions improve identification in causal discovery; by Philippe Brouillard, Perouz Taslakian, Alexandre Lacoste, Sebastien Lachapelle, Alexandre Drouin
    • \n
    \n */}\n {/* 15:30-15:45 CLeaR spotlights: Poster session 1 (in person; 1.5 minutes for each spotlight)
    \n
      \n
    • A.1. Amortized Causal Discovery: Learning to Infer Causal Graphs from Time-Series Data; by Sindy Löwe, David Madras, Richard Zemel, Max Welling
    • \n
    • A.2. A Distance Covariance-based Kernel for Nonlinear Causal Clustering in Heterogeneous Populations; by Alex Markham, Richeek Das, Moritz Grosse-Wentrup
    • \n
    • A.3. Differentiable Causal Discovery Under Latent Interventions; by Gonçalo Rui Alves Faria, Andre Martins, Mario A. T. Figueiredo
    • \n
    • A.4. Process Independence Testing in Proximal Graphical Event Models; by Debarun Bhattacharjya, Karthikeyan Shanmugam, Tian Gao, Dharmashankar Subramanian
    • \n
    • A.5. Predictive State Propensity Subclassification (PSPS): A causal inference algorithm for data-driven propensity score stratification; by Joseph Kelly, Jing Kong, Georg M. Goerg
    • \n
    • A.6. A Uniformly Consistent Estimator of non-Gaussian Causal Effects Under the k-Triangle-Faithfulness Assumption; by Shuyan Wang, Peter Spirtes
    • \n
    • A.7. Causal Discovery in Linear Structural Causal Models with Deterministic Relations; by Yuqin Yang, Mohamed S Nafea, AmirEmad Ghassami, Negar Kiyavash
    • \n
    • A.8. Causal Imputation via Synthetic Interventions; by Chandler Squires, Dennis Shen, Anish Agarwal, Devavrat Shah, Caroline Uhler
    • \n
    • A.9. Causal Bandits without prior knowledge using separating sets; by Arnoud De Kroon, Joris Mooij, Danielle Belgrave
    • \n
    • A.10. Data-driven exclusion criteria for instrumental variable studies; by Tony Liu, Patrick Lawlor, Lyle Ungar, Konrad Kording
    • \n
    \n \n 15:45-16:00 CLeaR spotlights: Poster session 2 (remote; 1.5 minutes for each spotlight)
    \n
      \n
    • B.1. A Multivariate Causal Discovery based on Post-Nonlinear Model; by Kento Uemura, Takuya Takagi, Kambayashi Takayuki, Hiroyuki Yoshida, Shohei Shimizu
    • \n
    • B.2. Bivariate Causal Discovery via Conditional Divergence; by Bao Duong, Thin Nguyen
    • \n
    • B.3. Diffusion Causal Models for Counterfactual Estimation; by Pedro Sanchez, Sotirios A. Tsaftaris
    • \n
    • B.4. Equality Constraints in Linear Hawkes Processes; by Søren Wengel Mogensen
    • \n
    • B.5. Local Constraint-Based Causal Discovery under Selection Bias; by Philip Versteeg, Joris Mooij, Cheng Zhang
    • \n
    • B.6. On the Equivalence of Causal Models: A Category-Theoretic Approach; by Jun Otsuka, Hayato Saigo
    • \n
    • B.7. CausalCity: Complex Simulations with Agency for Causal Discovery and Reasoning; by Daniel McDuff, Yale Song, Jiyoung Lee, Vibhav Vineet, Sai Vemprala, Nicholas Alexander Gyde, Hadi Salman, Shuang Ma, Kwanghoon Sohn, Ashish Kapoor
    • \n
    • B.8. Causal Discovery for Linear Mixed Data; by Yan Zeng, Shohei Shimizu, Hidetoshi Matsui, Fuchun Sun
    • \n
    • B.9. Identifying Principal Stratum Causal Effects Conditional on a Post-treatment Intermediate Response; by Xiaoqing Tan, Judah Abberbock, Priya Rastogi, Gong Tang
    • \n
    • B.10. Integrative R-learner of heterogeneous treatment effects combining experimental and observational studies; by Lili Wu, Shu Yang
    • \n
    \n \n 16:00-16:45 CLeaR poster session 1 (in person)\n \n 16:45-17:30 CLeaR poster session 2 (online via Zoom breakout rooms)\n */}\n
    \n
    \n
    Day 2 (April 12, Wednesday)
    \n \n 9:00-9:30 AM Arrival + coffee\n 9:30-10:30 AM Keynote by Negar Kiyavash\n
      \n
    • Causal Identification: Are We There Yet?
    \n 10:30-11:00 AM Coffee\n {/*
      \n
    • Panellists: Stefan Bauer, Rosemary Nan Ke, Negar Kiyavash, David Lopez-Paz
    • \n
    */}\n 11:00-12:30 PM Poster II\n
      \n
    • Poster II.1. Branch-Price-and-Cut for Causal Discovery
    • Poster II.2. Causal Discovery with Score Matching on Additive Models with Arbitrary Noise
    • Poster II.3. Beyond the Markov Equivalence Class: Extending Causal Discovery under Latent Confounding
    • Poster II.4. Learning Conditional Granger Causal Temporal Networks
    • Poster II.5. Practical Algorithms for Orientations of Partially Directed Graphical Models
    • Poster II.6. Enhancing Causal Discovery from Robot Sensor Data in Dynamic Scenarios
    • Poster II.7. Leveraging Causal Graphs for Blocking in Randomized Experiments
    • Poster II.8. Generalizing Clinical Trials with Convex Hulls
    • Poster II.9. Backtracking Counterfactuals
    • Poster II.10. Stochastic Causal Programming for Bounding Treatment Effects
    \n 12:30-2:30 PM Lunch\n 2:30-3:30 PM Keynote by Aapo Hyvarinen\n
      \n
    • Causal discovery and latent-variable models
    \n 3:30-4:00 PM Townhall Meeting\n 4:00-5:00 PM Panel discussion: Datasets\n 5:00-6:00 PM Oral II\n
      \n
    • Oral II.1. Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model
    • Oral II.2. Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions
    • Oral II.3. An Algorithm and Complexity Results for Causal Unit Selection
    \n Evening (onwards) Dinner / get-together\n
    \n
    \n
    Day 3 (April 13, Thursday)
    \n \n 9:00-9:30 AM Arrival + coffee\n 9:30-10:30 AM Breakout session\n 10:30-11:00 AM Coffee\n {/*
      \n
    • Panellists: Stefan Bauer, Rosemary Nan Ke, Negar Kiyavash, David Lopez-Paz
    • \n
    */}\n 11:00-12:30 PM Poster III\n
      \n
    • Poster III.1. A Meta-Reinforcement Learning Algorithm for Causal Discovery
    • Poster III.2. Causal Inference Despite Limited Global Confounding via Mixture Models
    • Poster III.3. Causal Triplet: An Open Challenge for Intervention-centric Causal Representation Learning
    • Poster III.4. Image-based Treatment Effect Heterogeneity
    • Poster III.5. Causal Learning through Deliberate Undersampling
    • Poster III.6. Influence-Aware Attention for Multivariate Temporal Point Processes
    • Poster III.7. Evaluating Temporal Observation-Based Causal Discovery Techniques Applied to Road Driver Behaviour
    • Poster III.8. Directed Graphical Models and Causal Discovery for Zero-Inflated Data
    • Poster III.9. An Algorithm and Complexity Results for Causal Unit Selection
    • Poster III.10. Local Dependence Graphs for Discrete Time Processes
    \n {/*
      \n
    • Peter Spirtes (Carnegie Mellon University): Inferring Causal Relations from Sample Data
    • \n
    */}\n 12:30-2:30 PM Lunch\n 2:30-6:00 PM Social Activity II\n {/* 14:00-15:00 CLeaR talks: Causality and other learning problems (17 minutes + 3-minute Q&A for each talk)\n
      \n
    • Learning Casual Overhypotheses through Exploration in Children and Computational Models; by Eliza Kosoy, Jasmine L Collins, David Chan, Jessica B Hamrick, Rosemary Nan Ke, Sandy Huang, Adrian Liu, John Canny, Alison Gopnik
    • \n
    • Non-parametric Inference Adaptive to Intrinsic Dimension; by Khashayar Khosravi, Greg Lewis, Vasilis Syrgkanis
    • \n
    • Evidence-Based Policy Learning; by Jann Spiess, Vasilis Syrgkanis
    • \n
    \n */}\n {/* 9:30-10:30 Panel 3: Systems & applications\n
      \n
    • Panellists: Victor Chernozhukov, Doina Precup, David Sontag, Eric Xing
    • \n
    \n 11:00-11:45 Plenary talk\n
      \n
    • James Robins (Harvard School of Public Health): The Bold Vision of Artificial Intelligence and Philosophy: Directed Acyclic Graphs and Causal Discovery from Non-Independence Constraints
    • \n
    \n 12:00-14:00 Lunch\n 14:00-15:00 CLeaR talks: Causality and ethical AI (17 minutes + 3-minute Q&A for each talk) \n
      \n
    • Causal Explanations and XAI ; by Sander Beckers
    • \n
    • Optimal Training of Fair Predictive Models; by Razieh Nabi, Daniel Malinsky, Ilya Shpitser
    • \n
    • Selection, Ignorability and Challenges With Causal Fairness; by Jake Fawkes, Robin Evans, Dino Sejdinovic
    • \n
    \n \n 15:30-15:45 CLeaR spotlights: Poster session 3 (in person; 1.5 minutes for each spotlight)\n
      \n
    • C.1. Weakly Supervised Discovery of Semantic Attributes; by Ameen Ali Ali, Tomer Galanti, Evgenii Zheltonozhskii, Chaim Baskin, Lior Wolf
    • \n
    • C.2. Attainability and Optimality: The Equalized Odds Fairness Revisited; by Zeyu Tang, Kun Zhang
    • \n
    • C.3. Causal Structure Discovery between Clusters of Nodes Induced by Latent Factors; by Chandler Squires, Annie Yun, Eshaan Nichani, Raj Agrawal, Caroline Uhler
    • \n
    • C.4. Disentanglement via Mechanism Sparsity Regularization: A New Principle for Nonlinear ICA; by Sebastien Lachapelle, Pau Rodriguez, Yash Sharma, Katie E Everett, Rémi LE PRIOL, Alexandre Lacoste, Simon Lacoste-Julien
    • \n
    • C.5. Relational Causal Models with Cycles: Representation and Reasoning; by Ragib Ahsan, David Arbour, Elena Zheleva
    • \n
    • C.6. Same Cause; Different Effects in the Brain; by Mariya Toneva, Jennifer Williams, Anand Bollu, Christoph Dann, Leila Wehbe
    • \n
    • C.7. Cause-effect inference through spectral independence in linear dynamical systems: theoretical foundations; by Michel Besserve, Naji Shajarisales, Dominik Janzing, Bernhard Schölkopf
    • \n
    • C.8. Disentangling Controlled Effects for Hierarchical Reinforcement Learning; by Oriol Corcoll, Raul Vicente
    • \n
    • C.9. Identifying Coarse-grained Independent Causal Mechanisms with Self-supervision; by Xiaoyang Wang, Klara Nahrstedt, Oluwasanmi O Koyejo
    • \n
    • C.10. Learning Invariant Representations with Missing Data; by Mark Goldstein, Joern-Henrik Jacobsen, Olina Chau, Adriel Saporta, Aahlad Manas Puli, Rajesh Ranganath, Andrew Miller
    • \n
    \n \n 15:45-16:00 CLeaR spotlights: Poster session 4 (remote; 1.5 minutes for each spotlight)\n
      \n
    • D.1. Partial Identification with Noisy Covariates: A Robust Optimization Approach; by Wenshuo Guo, Mingzhang Yin, Yixin Wang, Michael Jordan
    • \n
    • D.2. Can Humans Be out of the Loop? by Junzhe Zhang, Elias Bareinboim
    • \n
    • D.3. Efficient Reinforcement Learning with Prior Causal Knowledge; by Yangyi Lu, Amirhossein Meisami, Ambuj Tewari
    • \n
    • D.4. Info Intervention and its Causal Calculus; by Heyang Gong, Ke Zhu
    • \n
    • D.5. Simple data balancing achieves competitive worst-group-accuracy; by Badr Youbi Idrissi, Martin Arjovsky, Mohammad Pezeshki, David Lopez-Paz
    • \n
    • D.6. VIM: Variational Independent Modules for Video Prediction; by Rim Assouel, Lluis Castrejon, Aaron Courville, Nicolas Ballas, Yoshua Bengio
    • \n
    • D.7. Estimating Social Influence from Observational Data; by Dhanya Sridhar, Caterina De Bacco, David Blei
    • \n
    • D.8. Fair Classification with Instance-dependent Label Noise; by Songhua Wu, Mingming Gong, Bo Han, Yang Liu, Tongliang Liu
    • \n
    • D.9. Some Reflections on Drawing Causal Inference using Textual Data: Parallels Between Human Subjects and Organized Texts; by Bo Zhang, Jiayao Zhang
    • \n
    • D.10. Towards efficient representation identification in supervised learning; by Kartik Ahuja, Divyat Mahajan, Vasilis Syrgkanis, Ioannis Mitliagkas
    • \n
    \n \n 16:00-17:30 CLeaR poster session 3 (in person); CLeaR poster session 4 (online via Zoom breakout rooms) */}\n
    \n
    \n
    Day 4 (April 14, Friday)
    \n \n 9:00-9:30 AM Arrival + coffee\n 9:30-10:30 AM Oral III\n
      \n
    • Oral III.1. Directed Graphical Models and Causal Discovery for Zero-Inflated Data
    • Oral III.2. Causal Abstraction with Soft Interventions
    \n 10:30-11:00 AM Coffee\n 11:00-12:30 PM Poster IV\n
      \n
    • Poster IV.1. On Discovery of Local Independence over Continuous Variables via Neural Contextual Decomposition
    • Poster IV.2. Local Causal Discovery for Estimating Causal Effects
    • Poster IV.3. Can Active Sampling Reduce Causal Confusion in Offline Reinforcement Learning?
    • Poster IV.4. Causal Inference with Non-IID Data under Model Uncertainty
    • Poster IV.5. Factual Observation Based Heterogeneity Learning for Counterfactual Prediction
    • Poster IV.6. On the Interventional Kullback-Leibler Divergence
    • Poster IV.7. Unsupervised Object Learning via Common Fate
    • Poster IV.8. Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model
    • Poster IV.9. Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions
    • Poster IV.10. Causal Abstraction with Soft Interventions
    \n 12:30-1:00 PM Wrap-up\n 1:00-2:30 PM Lunch\n
    \n
    \n
    \n )\n}\n\nexport default FullAgenda2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst KeynoteSpeaker2023page = () => {\n return (\n
    \n Plenary Speakers\n \n\n - Aapo Hyvarinen, University of Helsinki\n Title: Causal discovery and latent-variable models \n

    Abstract: There is a deep connection between causal discovery and latent-variable models such as factor analysis, independent component analysis, and various unsupervised deep learning models. In several cases, estimation of a latent-variable model enables causal discovery, due to a kind of equivalence of the two in a purely observational regime. The key concept here is identifiability: we have to find a latent-variable model that is identifiable, i.e., one whose parameters can be uniquely estimated. Quite often, the identifiability of a latent-variable model then leads to identifiability of a causal discovery model. In this talk, I will review research on this connection and the relevant identifiability theory.

    \n\n\n - Miguel Hernan, CAUSALab / Biostatistics and Epidemiology, Harvard T.H. Chan School \n Title: Causal AI for data analysis in the health sciences \n

    Abstract: The tools referred to as AI may assist, or replace, health researchers who learn from data. This talk describes a taxonomy of learning tasks in science and explores the relationship between two of them: prediction (pattern recognition) and counterfactual prediction (causal inference). Researchers predict counterfactually by using a combination of data and causal models of the world. In contrast, AI tools developed for prediction using only data are being increasingly used for counterfactual prediction. This raises questions about the origin of causal models, and the future of causal inference research in the health sciences.

    \n\n\n - Negar Kiyavash, École polytechnique fédérale de Lausanne \n Title: Causal Identification: Are We There Yet?\n

    Abstract: We discuss causal identifiability, the canonical problem of causal inference, where the goal is to calculate the effect of intervening on a subset of variables on an outcome variable of interest. We first revisit the definition of the problem and note that it is necessary to add a positivity assumption on the observational distribution to the original definition, as without such an assumption the rules of do-calculus, and consequently the algorithms proposed in the field, are not sound. After discussing the state of the art and recent progress in the field, we present some of the open problems and remaining challenges.

    \n\n
    \n
    \n )\n}\n\nexport default KeynoteSpeaker2023page","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CallforPapers2023page = () => {\n return (\n
    \n Call for Papers\n\n

    We invite submissions to the 2nd Conference on Causal Learning and Reasoning (CLeaR), and welcome paper submissions that describe new theory, methodology, and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Accepted papers will be published in the Proceedings of Machine Learning Research (PMLR). \n

    \n\t
    Key dates
    The planned dates are as follows (a sketch of how the site stores them follows the list):
    • Paper submission deadline: Oct 28, 2022, 11:59pm (Anywhere on Earth, AoE)
    • Reviews released: Dec 2, 2022
    • Author rebuttals due: Dec 9, 2022, 11:59pm (AoE)
    • Final decisions: Jan 12, 2023
    • Camera-ready deadline: Feb 20, 2023, 11:59pm (AoE)
    • Conference dates: Apr 11 (Tue) - 14 (Fri), 2023. Format: hybrid, with both virtual and physical attendance.
    • Final camera-ready deadline: May 26, 2023, 11:59pm (AoE)
    \n\t
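    The site renders date tables with the DatesTable component backed by common/constants/importantDates.js (the CLeaR 2023 landing page imports importantDates2023 from it). A minimal sketch of how the dates above could be stored there; the module exists in this repo, but the exact field names below are assumptions:

    // common/constants/importantDates.js (sketch; `event` and `date` are
    // assumed field names, shown here only to illustrate the data shape)
    export const importantDates2023 = [
      { event: "Paper submission deadline", date: "Oct 28, 2022, 11:59pm (AoE)" },
      { event: "Reviews released", date: "Dec 2, 2022" },
      { event: "Author rebuttals due", date: "Dec 9, 2022, 11:59pm (AoE)" },
      { event: "Final decisions", date: "Jan 12, 2023" },
      { event: "Camera-ready deadline", date: "Feb 20, 2023, 11:59pm (AoE)" },
      { event: "Conference", date: "Apr 11 (Tue) - 14 (Fri), 2023" },
      { event: "Final camera-ready deadline", date: "May 26, 2023, 11:59pm (AoE)" },
    ];

    Keeping the dates in one constants module lets the Call for Papers and the Important Dates page stay consistent when a deadline changes.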

    Submit at https://openreview.net/group?id=cclear.cc/CLeaR/2023/Conference.

    \n\t
    Summary
    \n\t

    Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to facilitate formulating, understanding, and tackling a broad range of problems, including domain generalization, robustness, trustworthiness, and fairness across machine learning, reinforcement learning, and statistics.

    \n\n\t

    We invite papers that describe new theory, methodology and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Submitted papers will be evaluated based on their novelty, technical quality, and potential impact. Experimental methods and results are expected to be reproducible, and authors are strongly encouraged to make code and data available. We also encourage submissions of proof-of-concept research that puts forward novel ideas and demonstrates potential for addressing problems at the intersection of causality and machine learning.

    \n\t\n\t
    Paper Submission
    \n\t

    The proceedings track is the standard CLeaR paper submission track. Papers will be selected via a rigorous double-blind peer-review process. All accepted papers will be presented at the Conference as contributed talks or as posters and will be published in the Proceedings.

    \n\t

    Topics of submission may include, but are not limited to:
    • Machine learning building on causal principles
    • Causal discovery in complex environments
    • Efficient causal discovery in large-scale datasets
    • Causal effect identification and estimation
    • Causal generative models for machine learning
    • Unsupervised and semi-supervised deep learning connected to causality
    • Machine learning with heterogeneous data sources
    • Benchmarks for causal discovery and causal reasoning
    • Reinforcement learning
    • Fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • Applications of any of the above to real-world problems
    • Foundational theories of causation

    \n\n\t
    Physical Attendance
    \n\t

    The CLeaR 2023 organizing committee prioritizes the safety and health of our community. We are still considering the format of the CLeaR 2023 conference. It will preferably be held as a hybrid conference with no mandatory physical attendance, but we also have a backup plan to make the conference fully virtual should the pandemic situation require it. We will announce the final format of the conference on the website. Thank you for your patience and understanding.

    \n\n\t
    Formatting and Supplementary Material
    \n\t

    Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open access archival version of the full paper reviewed for CLeaR. You can also submit a single file of additional supplementary material separately, which may be either a pdf file (containing proof details, for instance) or a zip file that can include multiple files of all formats (such as code or videos). Note that reviewers are under no obligation to examine the supplementary material.

    \n\n\t

    Please format the paper using the official LaTeX style files. We do not support submission in formats other than LaTeX. Please do not modify the layout given by the style file.

    \n\t\n\t

    Submissions will be made through OpenReview (https://openreview.net/group?id=cclear.cc/CLeaR/2023/Conference); the submission system will open approximately 4-6 weeks before the paper submission deadline.

    \n\n\t
    Anonymization Requirements
    \n\t

    The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. We use OpenReview to host papers; however, public discussions are not allowed during the review process. The review comments are only visible to program chairs, area chairs, and reviewers who have submitted review comments. Papers will be desk-rejected if they contain any information that violates the double-blind reviewing policy, such as author names or affiliations, acknowledgements, or links from which an author's identity or institution could be inferred. Self-citations are allowed as long as anonymity is preserved. It is up to the authors' discretion how best to preserve anonymity when including self-citations. Possibilities include: leaving out a self-citation, including it but replacing the citation text with “removed for anonymous submission,” or leaving the citation as-is. We recommend leaving in a moderate number of self-citations for published or otherwise well-known work.

    \n\n\t

    Revisions are allowed in the submission system until the paper submission deadline. Changes will not be allowed afterwards.

    \n\n\t

    We strongly discourage advertising the preprint on social media or in the press while under submission to CLeaR. Preprints must not be explicitly identified as a CLeaR submission at any time during the review period (i.e., from the abstract submission deadline until the notification of the accept/reject decision).

    \n\n\t
    Dual Submissions
    \n\t

    CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in, or be submitted to, a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions to workshops or other non-archival venues (without proceedings) will not be considered dual submissions. Submissions as extended abstracts with 5 pages or fewer will not be considered concurrent submissions either. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. Tech reports (including reports on sites such as arXiv) do not count as prior publication. It is acceptable to have a substantially extended version of the submitted paper under consideration simultaneously for journal publication, so long as the journal version's planned publication date is after our publication date (April 13, 2023, tentatively), the arrangement does not violate the journal's policy, the journal submission does not interfere with CLeaR's right to publish the paper, and the situation is clearly described at the time of CLeaR submission. Please describe the situation in the appropriate box on the submission page (and do not include author information in the submission itself, to avoid accidental unblinding). Authors are also allowed to give talks to restricted audiences on the work(s) submitted to CLeaR during the review period.

    \n\n\t

    All accepted papers will be presented at the Conference either as contributed talks or as posters, and will be published in the CLeaR Conference Proceedings in the Journal of Machine Learning Research Workshop and Conference Proceedings series. Papers for talks and posters will be treated equally in publication.

    \n\n\t
    Confidentiality
    \n\t

    The reviewers and area chairs will have access to the papers and supplementary materials that are assigned to them.

    \n\t\n\t

    The program chairs and workflow chairs will have access to all the papers. Everyone having access to papers and supplementary materials will be instructed to keep them confidential during the review process and delete them after the final decisions.

    \n\t\n\t

    Reviews will be visible to area chairs, program chairs, and workflow chairs throughout the process. At any stage of the process, author names will not be known to the reviewers or area chairs, but only visible to program chairs. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers.

    \n\n\t

    Mihaela van der Schaar, Cheng Zhang & Dominik Janzing
    \n\tCLeaR 2023 Program Chairs

    \n\n\t

    Francesco Locatello & Peter Spirtes
    \n\tCLeaR 2023 General Chairs

    \n
    \n )\n}\n\nexport default CallforPapers2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CallforDatasets2023page = () => {\n return (\n
    \n Call for Causal Datasets\n\n

    We are thrilled to announce our causal benchmark dataset track as part of the CLeaR 2023 conference. High-quality, publicly available causal datasets are critical for advancing the field of causality, and we seek to provide researchers with a diverse collection of datasets that can be routinely used to test, benchmark, and improve the overall performance of causal methods and algorithms. This includes, but is not limited to, causal inference, causal discovery, and causal representation learning. By submitting your dataset to the CLeaR 2023 dataset track, you will not only contribute to enabling new discoveries in the field of causality, but also empower real-world impact in domains such as the natural sciences, healthcare, the social sciences, and economics. Together with these research fields, we hope to have a positive impact on the future of the field and to advance our understanding of causality. We encourage researchers from all fields to submit their datasets and be part of this exciting initiative.

    \n\t
    Submission Guideline
    \n\t\n\t

    We hope to make this initiative as inclusive as possible; datasets with diversity in terms of domain, complexity, and scale are therefore all welcome. Submissions should include a clear description of the underlying problem setting, the data format, and the causal quantities of interest, as well as a clear definition of the evaluation metrics. Submissions are NOT required to be anonymized. All submissions should comply with the TMLR guidelines for “General Ethical Conduct” outlined here. We note that all submissions will receive a light-touch review by committee members to verify that they comply with the guidelines above and to ensure the data provided will be of value to the causality community.

    \n\n

    Non-archival policy. The dataset track at CLeaR is a non-archival venue and will not have official proceedings. Submissions to the dataset track can be subsequently or concurrently submitted to other venues, but will be hosted on a dedicated website (and indexed by Google Scholar).

    \n\n

    To submit a dataset, please send the following materials to our official email (clear.datasets.2023@gmail.com):

    \n\t
      \n\t
    • An extended abstract paper (please follow the template described here). This should include a brief description of the dataset, which may include, but is not limited to:
      • background information;
      • data collection (for a real-world dataset) or generation (for a synthetic dataset) details;
      • definition of the underlying causal quantities;
      • details of the causal ground truth;
      • definition of evaluation metrics.
      Although there is no hard page limit, we recommend keeping the abstract brief and encourage submissions of 4 pages or fewer. Further technical details can be included in the GitHub repository, as detailed below.
    • A GitHub link to the dataset repo, which should store the download link to the dataset and include further instructions and technical details (a sketch of how published entries could be recorded on the site follows this list). The following materials are welcome, but not required:
      • a comprehensive readme;
      • data processing and loading scripts;
      • evaluation scripts;
      • baseline evaluation results.
      \n\t
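    The site already lists accepted datasets from common/constants/acceptedDatasets_2023.js, rendered with a PaperTable component. A minimal sketch of how a published dataset entry could be recorded in that style; the module exists in this repo, but the field names and values below are placeholders, not real submissions:

    // common/constants/acceptedDatasets_2023.js (sketch; hypothetical fields
    // and placeholder values only, mirroring the title/downloadLink pattern
    // used elsewhere on the site)
    export const acceptedDatasets2023 = [
      {
        title: "Example Causal Benchmark Dataset",
        abstractLink: "https://example.org/extended-abstract.pdf",
        repoLink: "https://github.com/example/causal-benchmark",
      },
    ];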
    \n\n
    How it works
    \n

    A formal review process will not be conducted for submissions. Instead, this initiative will consist of the following phases:

    \n
      \n\t
    • Screening phase: all submissions will first be screened by internal organizers to exclude low-quality datasets.
    • Publication phase: all submissions that pass the basic screening will be published on a dedicated dataset website.
    • Public voting phase: on the dedicated dataset website, we will provide mechanisms for all participants of CLeaR 2023 to vote on each dataset. Voting criteria and guidelines will be detailed on the dedicated website. We will then sort all submissions according to their votes and nominate the winner of the best dataset award.
    \n\n\t
    Key dates
    \n\t
      \n\t
    • Submission deadline: Mar 10, 2023
    • Acceptance notification: Mar 27, 2023
    • Publication on dedicated website: TBD
    \n
    \n )\n}\n\nexport default CallforDatasets2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ACInstruction2023page = () => {\n return (\n
    \n AC Instructions\n

    Thank you for serving as an area chair for CLeaR 2023. As an area chair, your job is to (1) ensure that all the submissions you are assigned have high-quality reviews and good discussions, and (2) write quality meta-reviews and make acceptance decisions. If you have any questions, please contact the program chairs at chairs.clear2023@gmail.com. The main tasks of area chairs are listed below.

    \n
    Main tasks
    \n
      \n
    1. Preparation (by Oct 28, 2022)
      • CLeaR 2023 is using the OpenReview system. Please create your OpenReview profile if you do not have one, and make sure it is up to date if you already have an account.
      • Please read and agree to the CLeaR 2023 code of conduct and declare the relevant conflicts of interest.
      • In addition to the guidelines below, please be familiar with the reviewer instructions. You will be interacting significantly with reviewers, so please make sure you understand what is expected of them.
    2. Bid on papers (Oct 31, 2022 - Nov 4, 2022)
      • Log into OpenReview and bid on submissions that fall into your area of expertise. Your bidding is an important input to the overall matching results.
    3. Check reviewer assignment (Nov 7, 2022 - Nov 9, 2022)
      • Make sure that every submission in your batch is matched with suitable reviewers before the reviewing process starts.
      • If you notice a conflict of interest with a submission that is assigned to you, please contact the program chairs immediately so that the paper can be reassigned.
      • You can invite extra reviewers, either from the existing pool or by inviting external reviewers, even after the reviewing process starts.
    4. Make sure all papers have quality reviews (Nov 29, 2022 - Dec 2, 2022)
      • Initial reviews are due Nov 29. You might need to send multiple reminder emails. If a reviewer is unable to deliver a review, please find a replacement reviewer who is able to do an emergency review.
      • Read all reviews carefully. If a review is substandard, you should ask the reviewer to improve it.
    5. Discuss with reviewers and authors (Dec 12, 2022 - Dec 30, 2022)
      • As soon as the discussion period starts, initiate and lead a discussion via OpenReview for each submission, and make sure the reviewers engage in the discussion phase.
      • Make sure your reviewers read and respond to all author responses.
      • Further discussion with the authors will be enabled during the discussion period. When posting a comment, please be careful about its visibility: whether it can be seen only by the reviewers, or by both reviewers and authors.
    6. Make accept/reject decisions (Jan 2, 2023 - Jan 9, 2023)
      • Write a meta-review that explains your decision (accept or reject) to the authors. Your comments should augment the reviews and explain how the reviews, author response, and discussion were used to arrive at your decision. Do not dismiss or ignore a review unless you have a good reason for doing so. If the reviewers cannot come to a consensus, you should read the paper carefully and write a detailed meta-review.
      • If you cannot make a decision, please reach out to the program chairs at your earliest convenience.
    \n \n

    * Please review the policies in the CLeaR 2023 Call for Papers and Reviewer Instructions.

    \n
    \n )\n}\n\nexport default ACInstruction2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CodeConduct2023page = () => {\n return (\n
    \n Code of Conduct\n
    Purpose
    \n

    At CLeaR 2023, our goal is to create a community and environment that recognizes and respects the intrinsic value of everyone, which is essential for the open exchange of ideas, freedom of thought and expression, and respectful scientific debate at the conference.

    \n
    Who
    \n

    All participants, including but not limited to organizers, reviewers, speakers, sponsors, and volunteers at our Conference and Conference-sponsored social events are required to agree with this Code of Conduct both during an event and on official communication channels, including social media. In particular, sponsors should not use sexual, racial, or other offensive images, events, or other materials. This code applies to both official sponsors and any organization that uses the conference name as a brand as part of its activities during or around the conference.

    \n
    Policy
    \n

    CLeaR is committed to providing all participants with an experience free from harassment, bullying, discrimination, and retaliation. This includes offensive comments related to age, gender, gender identity and expression, race, sexual orientation, physical or intellectual disability, physical appearance, body type, ethnicity, religion, politics, technical choices, or any other personal characteristics. We have no tolerance for bullying, intimidation, personal assault, harassment, continuous interruption of conversations or other activities, or behavior that interferes with the full participation of other participants. This includes sexual harassment, stalking, following, harassing photography or recording, inappropriate physical contact, unwelcome sexual attention, vulgar communication, and demeaning characterizations. These policies apply to actual meeting sites and conference venues, both physical and online, as well as official virtual platforms, including but not limited to OpenReview comments, video, virtual streaming, and Q&A tools. For example, offensive or even threatening comments on OpenReview are prohibited. Likewise, Zoom bombing or any virtual activity that has nothing to do with the topic of discussion and is detrimental to the purpose of the topic or program is not allowed.

    \n
    Action
    \n

    Participants who are asked by any member of the community to stop any improper behavior defined here should comply immediately. Meeting organizers may take further action at their discretion, including: formally or informally warning offenders, expelling them from the meeting without refund, barring them from submitting to or participating in future CLeaR meetings, or reporting the incident to the offender's home institution, funding agency, or local authorities or law enforcement. A ''just kidding'' response is unacceptable. If action is taken, an appeal procedure will be provided.

    \n
    Complaint reporting
    \n

    If you have any concerns about a possible violation of these policies, please contact the conference chairs (chairs.clear2023@gmail.com) as soon as possible. Reports made during the conference will be responded to within 24 hours; those made at other times, within two weeks. Complaints and violations will be handled with discretion. We are ready and eager to help attendees contact relevant help services, escort them to a safe place, or otherwise help those who have been harassed feel safe during the meeting. We gratefully accept feedback from the CLeaR community on our policy and actions.

    \n

    A similar version has been used by other conferences, such as ICLR 2020 and UAI 2020.

    \n
    \n\n );\n};\n\nexport default CodeConduct2023page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CoI2023page = () => {\n return (\n
    \n Conflicts of Interest\n

    This page defines what constitutes a conflict of interest for the CLeaR 2023 review process.

    \n

    If you are an author, reviewer, or area chair, please make sure to create or update your OpenReview profile. You will be asked to declare two types of conflicts: domain conflicts and personal conflicts. Both types are declared by filling out the appropriate sections of your OpenReview profile, as described below.

    \n
      \n
    • Domain conflicts (entered in Education & Career History): Please ensure that this section accurately represents your domain conflicts for (at least) the last three years. When you enter a domain conflict, none of your submissions will be visible to reviewers or area chairs who have also entered this domain conflict. Only the last three years of your and their Education & Career History will be used.
    • Personal conflicts (entered in Advisors, Relations & Conflicts): You should also enter your personal conflicts, including (1) family or close personal relationships, (2) Ph.D. advisee/advisor relationships, and (3) current, frequent, or recent collaborations (including internships) within the past three years.
    If you have any questions about special circumstances not discussed above, please contact the program chairs at chairs.clear2023@gmail.com.
    \n
    \n );\n};\n\nexport default CoI2023page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CameraReady2023page = () => {\n return (\n
    \n Camera-ready Instructions\n

    TBD

    \n \n
    \n );\n};\n\nexport default CameraReady2023page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './RegistrationPage.scss';\n\nconst Registration2023page = () => {\n return (\n
    \n Venue and Registration\n
    Venue
    \n

    CLeaR 2023 will be held at the Amazon Development Center, Tübingen, Germany on April 11-14, 2023. We encourage in-person participation, but the conference also has virtual elements.

    \n
    Registration
    \n

    Please register for CLeaR 2023 here (registration closes March 15). Please let us know if you need a letter for your visa application.

    \n
    Your way to Tuebingen
    \n

    Please check out the description of the Max-Planck-Institute for Intelligent Systems on how to get to Tuebingen using public transport here.

    \n
    Public transport in Tuebingen
    \n

    You will receive a public transport ticket at registration, which is valid during the conference period (the 11th to the 14th), but only for the city buses in Tuebingen (it is not valid all the way to Stuttgart or the airport). Note: you will need to buy a one-way ticket to get to the registration!

    \n

    Google Maps' public transport suggestions work fairly well in Tuebingen. There are multiple bus lines that bring you to the venue or close to it.

    \n

    The easiest option is bus line 3 towards Waldhäuser Ost: get off the bus at “Max-Planck-Institute” and you will see the Amazon building right away.

    \n \"Google\n

    Bus lines 4 and 6, also towards Waldhäuser Ost, bring you close to the venue as well. However, you will need to walk up to 10 minutes from there (get off at “Winkelwiese”).

    \n \"Google\n

    Bus line 17 also works (get off at “Robert-Gradmann-Weg” and continue walking up the hill).

    \n \"Google\n
    Hotel Accommodations
    \n

    We do not partner with any hotel in Tuebingen. Here is an incomplete list:

    \n \n
    \n )\n}\n\nexport default Registration2023page\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst PresentationInstruction2023page = () => {\n return (\n
    \n Presentation Instructions\n
    Instructions for orals:
    \n
      \n
    • Oral presentations are 17 minutes, plus 3 minutes for questions
    • Every paper accepted as an oral should also prepare and present a poster
    \n
    Instructions for posters:
    \n
      \n
    • The poster boards fit A0 in portrait orientation; please aim for a size between A1 and A0
    \n
    Remote presentation:
    \n

    We highly recommend attending the conference in person, even more so if you are presenting an accepted paper.

    \n

    Please contact us at zietld@amazon.com in case you cannot present (oral or poster) in person (subject: [CleaR 2023 remote]).

    \n
    \n )\n}\n\nexport default PresentationInstruction2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ZoomLink = ({ topic, time, link, meetingId, passcode, localNumberLink }) => (\n
    \n
    {topic}
    \n

    Time: {time}

    \n

    Join Zoom Meeting:

    \n

    {link}

    \n

    Meeting ID: {meetingId}

    \n

    Passcode: {passcode}

    \n

    Find your local number

    \n
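    {/* Usage sketch for the ZoomLink component above; every value here is a
        hypothetical placeholder, not a real meeting credential. All six props
        are plain strings rendered verbatim by the markup above:
        <ZoomLink
          topic='Keynotes and orals'
          time='9:00 (CEST)'
          link='https://zoom.us/j/0000000000'
          meetingId='000 0000 0000'
          passcode='******'
          localNumberLink='https://zoom.us/u/xxxxxxxx'
        /> */}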
    \n);\n\n\nconst papers = [\n {\n title: \"Poster I.1. Sample-Specific Root Causal Inference with Latent Variables\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR_EEL_Poster.pdf\"\n },\n {\n title: \"Poster I.2. Causal Discovery for Non-stationary Non-linear Time Series Data Using Just-In-Time Modeling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023PosterComplete.pdf\"\n },\n {\n title: \"Poster I.3. Causal Models with Constraints\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR_poster-5.pdf\"\n },\n {\n title: \"Poster I.4. Non-parametric identifiability and sensitivity analysis of synthetic control models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023_%20Spotify-7.pdf\"\n },\n {\n title: \"Poster I.6. Estimating long-term causal effects from short-term experiments and long-term observational data with unobserved confounding\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023_Poster_VanGoffrier.pdf\"\n },\n {\n title: \"Poster I.7. Learning Causal Representations of Single Cells via Sparse Mechanism Shift Modeling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/Learning_Causal_Representations_of_Single_Cells_via_Sparse_Mechanism_Shift_Modeling.pdf\"\n },\n {\n title: \"Poster I.9. Instrumental Processes Using Integrated Covariances\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/posterI10.pdf\"\n },\n {\n title: \"Poster II.1. Branch-Price-and-Cut for Causal Discovery\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/poster_1.pdf\"\n },\n {\n title: \"Poster II.3. Beyond the Markov Equivalence Class: Extending Causal Discovery under Latent Confounding\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/poster_CLeaR_2023_final.pdf\"\n },\n {\n title: \"Poster II.4. Learning Conditional Granger Causal Temporal Networks\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Learning_Conditional_Granger_Causal_Temporal_Networks.pdf\"\n },\n {\n title: \"Poster II.5. Practical Algorithms for Orientations of Partially Directed Graphical Models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Poster_Practical_Algorithms_for_Orientations_of_Partially_Directed_Graphical_Models.pdf\"\n },\n {\n title: \"Poster II.6. Enhancing Causal Discovery from Robot Sensor Data in Dynamic Scenarios\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Postercopy.pdf\"\n },\n {\n title: \"Poster II.7. Leveraging Causal Graphs for Blocking in Randomized Experiments\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CleaR2023-poster.pdf\"\n },\n {\n title: \"Poster II.8. Generalizing Clinical Trials with Convex Hulls\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CLeaR_OCH_Poster.pdf\"\n },\n {\n title: \"Poster II.9. Backtracking Counterfactuals\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Poster_Backtracking_Counterfactuals.pdf\"\n },\n {\n title: \"Poster II.10. Stochastic Causal Programming for Bounding Treatment Effects\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CLeaR_poster_SCP.pdf\"\n },\n {\n title: \"Poster III.1. A Meta-Reinforcement Learning Algorithm for Causal Discovery\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/23_meta_rl_cd_clear_poster.pdf\"\n },\n {\n title: \"Poster III.2. 
Causal Inference Despite Limited Global Confounding via Mixture Models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR_2023_Poster_Vertical_Final.pdf\"\n },\n {\n title: \"Poster III.3. Causal Triplet: An Open Challenge for Intervention-centric Causal Representation Learning\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/poster_causal_triplet.pdf\"\n },\n {\n title: \"Poster III.4. Image-based Treatment Effect Heterogeneity\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/ImageHeterogeneity_clear.pdf\"\n },\n {\n title: \"Poster III.5. Causal Learning through Deliberate Undersampling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR_poster_Soloveva.pdf\"\n },\n {\n title: \"Poster III.6. Influence-Aware Attention for Multivariate Temporal Point Processes\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/Influence-Aware_Attention_for_Multivariate_Temporal_Point_Processes_CLeaR23.pdf\"\n },\n {\n title: \"Poster III.7. Evaluating Temporal Observation-Based Causal Discovery Techniques Applied to Road Driver Behaviour\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR23_causaldiscovery_poster.pdf\"\n },\n {\n title: \"Poster III.8. Directed Graphical Models and Causal Discovery for Zero-Inflated Data\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/ZeroDAG_poster_final.pdf\"\n },\n {\n title: \"Poster III.10. Local Dependence Graphs for Discrete Time Processes\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/clear-poster-niemiro_rajkowski.pdf\"\n },\n {\n title: \"Poster IV.1. On Discovery of Local Independence over Continuous Variables via Neural Contextual Decomposition\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CSSI_CLeaR2023_poster.pdf\"\n },\n {\n title: \"Poster IV.3. Can Active Sampling Reduce Causal Confusion in Offline Reinforcement Learning?\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CleaR.pdf\"\n },\n {\n title: \"Poster IV.4. Causal Inference with Non-IID Data under Model Uncertainty\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster_3.pdf\"\n },\n {\n title: \"Poster IV.5. Factual Observation Based Heterogeneity Learning for Counterfactual Prediction\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster.pdf\"\n },\n {\n title: \"Poster IV.6. On the Interventional Kullback-Leibler Divergence\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/On_the_IKL_div_poster.pdf\"\n },\n {\n title: \"Poster IV.7. Unsupervised Object Learning via Common Fate\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster_tangemann_et_al.pdf\"\n },\n {\n title: \"Poster IV.8. Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/Figueiredo_CleaR_2023_Poster.pdf\"\n },\n {\n title: \"Poster IV.9. Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster1.pdf\"\n },\n {\n title: \"Poster IV.10. 
Causal Abstraction with Soft Interventions\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CausalAbsractionWithSoftInterventions.pdf\"\n },\n];\n\nclass ZoomLinks extends React.Component {\n constructor(props) {\n super(props);\n this.state = {\n isAuthenticated: false,\n password: \"\",\n };\n this.handleSubmit = this.handleSubmit.bind(this);\n this.handleChange = this.handleChange.bind(this);\n }\n\n handleSubmit(event) {\n event.preventDefault();\n // This is a dummy password, replace this with your own password.\n const correctPassword = \"causality2023_p\";\n if (this.state.password === correctPassword) {\n this.setState({ isAuthenticated: true });\n } else {\n alert(\"Incorrect password\");\n }\n }\n\n handleChange(event) {\n this.setState({ password: event.target.value });\n }\n\n render() {\n if (this.state.isAuthenticated) {\n return (\n
    \n

    Registered participants can attend keynotes and oral presentations (see schedule) remotely. Please connect to this Zoom meeting:

    \n \n

    This link is used for all keynotes and orals. Note that in-person poster sessions, the townhall, panel discussion, and social activities will not be streamed.

    \n

    When participating remotely, please mute yourself (except when asking questions). Feel free to ask questions either in the chat or by raising your hand in Zoom and asking them over video.

    \n

    Virtual posters can be found below.

    \n \n \n \n \n \n \n \n \n {papers.map((paper) => (\n \n \n \n \n ))}\n \n
    Paper titleDownload link
    {paper.title}Download
    \n
    \n );\n } else {\n return (\n
    \n
    Enter password to access Zoom links and posters
    \n \n \n
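    {/* Note on this gate: handleSubmit compares the typed password against a
        constant (correctPassword) that ships in the client bundle, so this
        only deters casual visitors; anyone can read the password from the
        served JavaScript. Real access control would need a server-side check
        or signed links. */}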
    \n );\n }\n }\n}\n\n\n\nconst OnlineSchedulepage = () => {\n return (\n
    \n Online Schedule\n \n
    \n )\n}\n\nexport default OnlineSchedulepage\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst SocialActivities2023page = () => {\n return (\n
    \n Social Activities\n\n

    Tue, 11 Apr: Bar Night

    \n Casual pub evening, starting from 20:00 at Freistil Taproom.\n
      \n
• For those interested, we will meet earlier, at 18:30, at Neckarmüller Brewery for a beer and some dinner.
    • \n
• Freistil also serves a limited selection of dishes.
    • \n
    • Later in the evening, we may crawl to a couple of other bars in the center.
    • \n
    \n

    Wed, 12 Apr: Conference Dinner

    \n Official conference dinner, starting from 18:30 at Museum (upstairs from 1821).\n
      \n
    • 18:30--19:30: Reception with bar (drinks paid directly by participants)
    • \n
    • From 19:30: Buffet style dinner
    • \n
    • Dress code: wear whatever you are comfortable with
    • \n
    • You may need to pay for alcoholic beverages during dinner
    • \n
    \n

    Thu, 13 Apr: Hike

    \n Group hike to the neighbouring village Bebenhausen.\n
      \n
    • We will meet at and leave from the conference venue at 14:30.
    • \n
    • The destination is the 12th century monastery in Bebenhausen.
    • \n
    • We recommend wearing shoes suitable for hiking.
    • \n
• Duration: ca. 2.5 h (45 min to Bebenhausen, mostly downhill, plus time to walk around, plus 1 h back, mostly uphill, including a somewhat steep part).
    • \n
• Whoever wants to stay and explore the monastery and the palace (tickets at their own cost) will need to find their own way back. \n The rest of the group continues hiking on this trail.\n
    • \n
• \n Recommendation: Download the Outdooractive app on your phone and save the trail. \n We plan to move as a group, but this helps in case someone gets separated.\n
    • \n
    \n If you have questions about any of the above activities, just ask the social co-chairs Atalanti and Julius during the conference.\n\n
    \n )\n}\n\nexport default SocialActivities2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst StudentVolunteers2023page = () => {\n return (\n
    \n Student Volunteers\n Danru Xu (University of Amsterdam)
    \n Mátyás Schubert (University of Amsterdam)
    \n Philip Boeken (University of Amsterdam)
    \n Teodora Pandeva (University of Amsterdam)
    \n Daan Roos (University of Amsterdam)
    \n Fan Feng (City University of Hong Kong)\n
    \n )\n}\n\nexport default StudentVolunteers2023page\n","import React from \"react\";\nimport \"./DatesTable.scss\";\n\nconst DatesTable = ({ array }) => {\n return (\n \n \n \n \n \n \n \n \n {array.map(({ name, date }) => (\n \n \n \n \n ))}\n \n
    WhatWhen
    {name}{date}
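    {/* Each row renders one entry of the `array` prop. Minimal expected shape
        (example values taken from the CLeaR 2024 key dates on this site):
        [{ name: 'Paper submission deadline', date: 'Oct 27, 2023' }, ...] */}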
    \n );\n};\n\nexport default DatesTable;\n","import React from 'react'\nimport DatesTable from './components/DateTable/DatesTable'\nimport Title from '../../../components/Title/Title'\nimport { importantDates2024 } from '../../../common/constants/importantDates'\n\nconst ImportantDates2024page = () => {\n return (\n <>\n Important Dates\n \n \n )\n}\n\nexport default ImportantDates2024page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
    \n {array.map(({title, author, filePath, description}) => (\n \n \n

    {title}

    \n {author}

    \n Download PDF\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedDatasets from \"../../../common/constants/acceptedDatasets_2023\";\n\nconst AcceptedDatasets2023page = () => {\n return (\n //
    \n // Accepted Papers\n // TBD\n //
    \n
    \n Accepted Datasets\n \n
    \n\n );\n};\n\nexport default AcceptedDatasets2023page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
    \n {array.map(({title, author ,description}) => (\n \n \n

    {title}

    \n {author}\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedPapers_oral from \"../../../common/constants/acceptedPapers_oral2023\";\nimport acceptedPapers_poster from \"../../../common/constants/acceptedPapers_poster2023\";\nimport acceptedPapers_best from \"../../../common/constants/acceptedPapers_best2023\";\nconst AcceptedPapers2023page = () => {\n return (\n //
    \n // Accepted Papers\n // TBD\n //
    \n
    \n Accepted Papers\n
    Best Paper award
    \n \n
    Oral
    \n \n
    Poster
    \n \n
    \n );\n};\n\nexport default AcceptedPapers2023page;\n","import React from \"react\";\nimport \"./OCTable.scss\";\n\nconst OCTable = ({ array }) => {\n return (\n \n {array.map(({ position, persons }) => (\n \n \n \n ))}\n
    \n

    {position}

    \n {persons.map(({ name, href, address }) => (\n
    \n \n {name}\n \n
    \n {address}\n
    \n ))}\n
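    {/* Sketch of the expected `array` prop shape, with field names taken from
        the destructuring above; the person shown is an illustrative
        placeholder, not a real committee entry:
        [{ position: 'Program Chairs',
           persons: [{ name: 'Jane Doe', href: 'https://example.org/~jdoe',
                       address: 'Example University' }] }] */}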
    \n );\n};\n\nexport default OCTable;\n","import React from 'react'\nimport Title from '../../../components/Title/Title'\nimport OCTable from './components/OCTable/OCTable'\nimport { array2024 } from '../../../common/constants/organizingCommittee'\n\n\nconst OrganizingCommittee2024page = () => {\n return (\n
    \n Organizing Committee\n \n
    \n )\n}\n\nexport default OrganizingCommittee2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ReviewerInstruction2024page = () => {\n return (\n
    \n Reviewer Instructions\n

Thank you for agreeing to review for CLeaR 2024! Your assessment is vital to creating a high-quality program. This page provides the review guidelines that will help you write reviews efficiently and effectively.

    \n
    Main tasks
    \n
      \n
    1. Preparation (by Oct 28, 2023)
    2. \n
        \n
      • CLeaR 2024 is using the OpenReview System. Please create your OpenReview profile if you do not have one and make sure it is up to date if you already have an account.
      • \n
      • Reviewer invitations will be sent via noreply@openreview.net. Please accept the reviewer invitation before the expiry date.
      • \n
• Please read and agree to the CLeaR 2024 code of conduct and declare the relevant conflicts of interest.
      • \n
      \n
    3. Paper bidding and assignments checking (Oct 31, 2023 - Nov 4, 2023)
    4. \n
        \n
      • Please bid on the papers that fall into your area of expertise. Your bidding is an important input to the overall matching results.
      • \n
      • Please check the assigned papers right after the paper assignment. If you do not feel qualified to review a paper or find potential conflicts of interest, please communicate with your AC as soon as possible.
      • \n
      \n
5. Write thorough and timely reviews (Nov 10, 2023 - Nov 29, 2023)
    6. \n
        \n
      • Please make your review as deep and detailed as possible. Superficial reviews are not really helpful in making final decisions. It is also important to treat each submission fairly and provide unbiased reviews.
      • \n
• A review form has been designed to facilitate the review process. Please refer to the “Review Form” section for step-by-step instructions on how to answer each question in the review form.
      • \n
      \n
    7. Discuss with authors/fellow reviewers/ACs (Dec 12, 2023 -- Dec 30, 2023)
    8. \n
        \n
      • Before the start of discussions, please carefully read author responses with an open mind to avoid possible misunderstandings. Even if the author's rebuttal does not change your opinion, please acknowledge that you have read and considered it.
      • \n
      • A further discussion with the authors will be enabled during the discussion period. If you want the authors to clarify more things after reading the rebuttal, you can discuss with them on the paper’s page.
      • \n
      • All reviewers should actively participate in discussions with fellow reviewers and ACs to have a more comprehensive understanding of each paper. The discussions are especially important for borderline papers and papers with high variance assessments. While engaging in the discussion, please be professional, polite, and keep an open mind. Although full consensus makes the final decision easier, it is not mandatory in the reviewing process, as different people may have different perspectives.
      • \n
      • If you change your opinion during or after the discussion phase, please update your ratings and give specific reasons in the final comments.
      • \n
      \n
    \n
    Review form
    \n
      \n
    1. Summary. Summarize the main contributions of each paper. The contributions may be new problems, theories, methods, algorithms, applications, benchmarks, etc.
    2. \n
    3. Main review. Please provide an in-depth review of each paper by considering the following aspects:
    4. \n
        \n
      • Originality: Does the paper provide anything new, like a new problem or a new method? Is the novelty compared to existing works well justified? Is it possible that similar ideas have been studied but the paper does not cite them properly?
      • \n
      • Significance: Does the paper address an important problem? How relevant are the results to the CLeaR community? Does the proposed theory or method significantly advance the state-of-the-art? Do the results in the paper provide new insights to the research problem? Is this paper likely to have broad impacts outside the CLeaR community, e.g., in natural/social science or engineering?
      • \n
      • Technical quality: Is the proposed approach technically sound? Are claims substantiated by theoretical and/or empirical results? Are the derivations and proofs correct? Is the proposed method unnecessarily complicated? Are the hyperparameters tuned in an appropriate manner?
      • \n
• Clarity: Is the submission clearly written and well organized? Is the take-home message easily extractable from the paper? Is the motivation well explained by illustrations and examples? Are the technical details described rigorously? Are there a significant number of typos that make the paper hard to read?
      • \n
      \n
5. Overall score. We use a 10-point scoring system for the overall assessment. Please select the category that best describes your assessment of the paper (the categories are also sketched as a data constant after this list).
    6. \n
        \n
      • 10: Top 5% of accepted papers, seminal paper
      • \n
      • 9: Top 15% of accepted papers, strong accept
      • \n
      • 8: Top 50% of accepted papers, clear accept
      • \n
      • 7: Good paper, accept
      • \n
      • 6: Marginally above acceptance threshold
      • \n
      • 5: Marginally below acceptance threshold
      • \n
      • 4: Ok but not good enough - rejection
      • \n
      • 3: Clear rejection
      • \n
      • 2: Strong rejection
      • \n
      • 1: Trivial or wrong
      • \n
      \n
    7. Confidence score. Please select the category that best describes your confidence in the assessment of the submission.
    8. \n
        \n
      • 5: You are absolutely certain about your assessment. You are very familiar with the related work and checked the math/other details carefully.
      • \n
      • 4: You are confident in your assessment, but not absolutely certain. It is unlikely, but not impossible, that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work.
      • \n
      • 3: You are fairly confident in your assessment. It is possible that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • \n
      • 2: You are willing to defend your assessment, but it is quite likely that you did not understand central parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • \n
      • 1: Your assessment is an educated guess. The submission is not in your area or the submission was difficult to understand. Math/other details were not carefully checked.
      • \n
      \n
    \n
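    A minimal sketch of how this rubric could live in the site's own common/constants convention (file name and export are hypothetical; the labels are copied from the list above), so the page could render the categories from data instead of hard-coding them:
    // common/constants/reviewScores.js (hypothetical)
    export const overallScores = [
      { score: 10, label: 'Top 5% of accepted papers, seminal paper' },
      { score: 9, label: 'Top 15% of accepted papers, strong accept' },
      { score: 8, label: 'Top 50% of accepted papers, clear accept' },
      { score: 7, label: 'Good paper, accept' },
      { score: 6, label: 'Marginally above acceptance threshold' },
      { score: 5, label: 'Marginally below acceptance threshold' },
      { score: 4, label: 'Ok but not good enough - rejection' },
      { score: 3, label: 'Clear rejection' },
      { score: 2, label: 'Strong rejection' },
      { score: 1, label: 'Trivial or wrong' },
    ];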
    Policies
    \n

Confidentiality. By reviewing for CLeaR 2024, you agree to keep all material and information related to the review confidential. In particular, you must not use ideas and results from submitted papers in your own research or distribute them to others. You should delete all reviewing material, such as submitted code, at the end of the reviewing cycle. You should not talk about submissions, or content related to their reviewing, with anyone without prior approval from the program chairs.

    \n

    Double-blind reviewing. The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. However, author names will be visible to area chairs and program chairs. Authors are responsible for anonymizing their submissions. Submissions may not contain any identifying information that may violate the double-blind reviewing policy. If you are assigned a submission that is not adequately anonymized, please contact the corresponding AC. Also, you should not attempt to find out the identities of authors for any of your assigned submissions, e.g., by searching arXiv preprints. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers. Please do not disclose your identity to authors and fellow reviewers in the discussions.

    \n

Dual submissions. CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in, or submitted to, a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions as extended abstracts (5 pages or less), or to workshops or non-archival venues (without proceedings), will not be considered concurrent submissions. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. If you suspect that a submission assigned to you is a dual submission, or if you require further clarification, please contact the corresponding AC. Please see the Call for Papers for more information about dual submissions.

    \n

Violations of formatting instructions. Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open access archival version of the full paper reviewed for CLeaR. If you are assigned a paper that is overlength or appears to violate the CLeaR proceedings format (e.g., by decreasing margins or font size, by removing some pre-fixed spaces, etc.), please notify the corresponding AC immediately.

    \n \n

    * Please also review the policies in the CLeaR 2024 Call for Papers.

    \n
    \n )\n}\n\nexport default ReviewerInstruction2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport OCTable from '../OrganizingCommitteePage/components/OCTable/OCTable'\nimport {array ,array1} from '../../../common/constants/advisoryboard'\n\nconst AdvisoryBoard2023page = () => {\n return (\n
    \n Advisory Board\n \n \n \n \n \n
    \n
    \n )\n}\n\nexport default AdvisoryBoard2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './SponsorsPage.scss';\n\nconst Sponsors2024page = () => {\n return (\n
    \n Sponsors\n
    \n
    Gold Sponsors
    \n \n \n \n \n
    \n
    Silver Sponsors
    \n \n \n {/* */}\n \n \n
    \n
    Bronze Sponsors
    \n \n \n \n \n
    \n
    \n
    \n )\n}\n\nexport default Sponsors2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst AreaChair2024page = () => {\n return (\n
    \n Area Chairs\n

    TBD

    \n
    \n )\n}\n\nexport default AreaChair2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ProgramCommittee2024page = () => {\n return (\n
    \n Program Committee\n \n TBD\n
    \n )\n}\n\nexport default ProgramCommittee2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport DatesTable from \"../ImportantDatesPage/components/DateTable/DatesTable\";\nimport { importantDates2024 } from \"../../../common/constants/importantDates\";\nimport \"./CLeaR2024Page.scss\";\n\nconst CLeaR2024page = () => {\n return (\n
    \n CLeaR (Causal Learning and Reasoning) 2024\n
    \n

    \n Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to be able to facilitate formulating, understanding, and tackling a number of hard machine learning problems in transfer learning, reinforcement learning, and deep learning.\n

    \n

\n We invite submissions to the 3rd Conference on Causal Learning and Reasoning (CLeaR), and welcome paper submissions that describe new theory, methodology, and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Submitted papers will be evaluated based on their novelty, technical quality, and potential impact. Experimental methods and results are expected to be reproducible, and authors are strongly encouraged to make code and data available. We also encourage submissions of proof-of-concept research that puts forward novel ideas and demonstrates potential for addressing problems at the intersection of causality and machine learning. \n CLeaR 2024 will be held at the Palisades Room, 3rd Floor, Carnesale Commons, UC Los Angeles, California from April 1 to 3, 2024.\n

    \n Topics of submission may include, but are not limited to:\n
      \n
    • Machine learning building on causal principles
    • \n
    • Causal discovery in complex environments
    • \n
    • Efficient causal discovery in large-scale datasets
    • \n
    • Causal effect identification and estimation
    • \n
    • Causal generative models for machine learning
    • \n
    • Unsupervised and semi-supervised deep learning connected to causality
    • \n
    • Machine learning with heterogeneous data sources
    • \n
• Benchmarks for causal discovery and causal reasoning
    • \n
    • Reinforcement learning
    • \n
    • Fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • \n
    • Applications of any of the above to real-world problems
    • \n
    • Causal representation learning
    • \n
    • Causal inference in philosophy and psychology
    • \n
    \n

Unlike last year, there will be a single track for all submissions (no separate call for dataset submissions). The conference program will highlight new applications of causality to real-world problems, benchmark datasets, and benchmark methodologies.

    \n
    \n

    \n
    \n
    Important Dates
    \n \n
    \n
    \n );\n};\n\nexport default CLeaR2024page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst FullAgenda2024page = () => {\n return (\n
    \n Full Agenda\n
    Day 1 (April 1, Monday)
\n \n 9:00-10:30 Arrival + registration + breakfast \n \n 10:30-11:00 Welcome\n \n 11:00-12:00 Keynote by Eric Tchetgen Tchetgen\n
      \n
    • Introducing the Forster-Warmuth Nonparametric Counterfactual Regression
    • \n
\n \n 12:00-14:00 Lunch break\n \n 14:00-15:00 Oral I and Benchmark spotlights\n
      \n
    • Oral I.1. (15 mins) Best Paper (TBA)
    • \n
    • Oral I.2. (15 mins) Extracting the Multiscale Causal Backbone of Brain Dynamics
    • \n
    • Oral I.3. (15 mins) Bicycle: Intervention-Based Causal Discovery with Cycles
    • \n
    • Poster S.1. (5 mins) CausalAssembly: Generating Realistic Production Data for Benchmarking Causal Discovery
    • \n
    • Poster S.2. (5 mins) The PetShop Dataset — Finding Causes of Performance Issues across Microservices
    • \n
    • Poster S.3. (5 mins) Causal discovery in a complex industrial system: A time series benchmark
    • \n
\n 15:00-15:30 Coffee break\n \n 15:30-17:00 Poster I\n
      \n
    • Poster I.1. Structure Learning with Continuous Optimization: A Sober Look and Beyond
    • \n
    • Poster I.2. A causality-inspired plus-minus model for player evaluation in team sports
    • \n
    • Poster I.3. Inference of nonlinear causal effects with GWAS summary data
    • \n
    • Poster I.4. Implicit and Explicit Policy Constraints for Offline Reinforcement Learning
    • \n
    • Poster I.5. Estimating the Causal Effect of Early ArXiving on Paper Acceptance
    • \n
    • Poster I.6. Bootstrap aggregation and confidence measures to improve time series causal discovery
    • \n
    • Poster I.7. Extracting the Multiscale Causal Backbone of Brain Dynamics
    • \n
    • Poster I.8. The PetShop Dataset — Finding Causes of Performance Issues across Microservices
    • \n
    • Poster I.9. Evaluating and Correcting Performative Effects of Decision Support Systems via Causal Domain Shift
    • \n
    • Poster I.10. Causal Discovery Under Local Privacy
    • \n
    • Poster I.11. Causal Layering via Conditional Entropy
    • \n
    • Poster I.12. Causal Discovery with Mixed Linear and Nonlinear Additive Noise Models: A Scalable Approach
    • \n
    • Poster I.13. Causal discovery in a complex industrial system: A time series benchmark
    • \n
    • Poster I.14. Bicycle: Intervention-Based Causal Discovery with Cycles
    • \n
    • Poster I.15. CausalAssembly: Generating Realistic Production Data for Benchmarking Causal Discovery
    • \n
\n 17:00-18:00 Keynote by Bernhard Schölkopf\n
      \n
    • Learning Causal Representations
    • \n
    \n \n
    \n
    \n
    Day 2 (April 2, Tuesday)
\n \n 9:00-9:30 Arrival + breakfast \n \n 9:30-10:00 Oral II\n
      \n
    • Oral II.1. Cautionary Tales on Synthetic Controls in Survival Analyses
    • \n
    • Oral II.2. Fundamental Properties of Causal Entropy and Information Gain
    • \n
\n 10:00-10:30 Coffee break\n \n 10:30-12:00 Poster II\n
      \n
    • Poster II.1. Sequential Deconfounding for Causal Inference with Unobserved Confounders
    • \n
    • Poster II.2. Cautionary Tales on Synthetic Controls in Survival Analyses
    • \n
    • Poster II.3. Fundamental Properties of Causal Entropy and Information Gain
    • \n
    • Poster II.4. Causality of Functional Longitudinal Data
    • \n
    • Poster II.5. Causal Matching using Random Hyperplane Tessellations
    • \n
    • Poster II.6. Lifted Causal Inference in Relational Domains
    • \n
    • Poster II.7. Scalable Counterfactual Distribution Estimation in Multivariate Causal Models
    • \n
    • Poster II.8. Pragmatic Fairness: Developing Policies with Outcome Disparity Control
    • \n
    • Poster II.9. Dual Likelihood for Causal Inference under Structure Uncertainty
    • \n
    • Poster II.10. On the Identifiability of Quantized Factors
    • \n
    • Poster II.11. Meaningful Causal Aggregation and Paradoxical Confounding
    • \n
    • Poster II.12. Semiparametric Efficient Inference in Adaptive Experiments
    • \n
    • Poster II.13. Causal Imputation for Counterfactual SCMs: Bridging Graphs and Latent Factor Models
    • \n
    • Poster II.14. Causal State Distillation for Explainable Reinforcement Learning
    • \n
    • Poster II.15. Expediting Reinforcement Learning by Incorporating Knowledge About Temporal Causality in the Environment
    • \n
\n 12:00-13:30 Lunch break\n \n 13:30-14:30 Keynote by Tania Lombrozo\n
      \n
    • Simplicity and Stability in Human Causal Reasoning
    • \n
\n \n 14:30-18:00 Social activity\n \n 18:00 Dinner\n \n 19:30-20:00 Keynote by Judea Pearl\n \n
    \n
    \n
    Day 3 (April 3, Wednesday)
\n \n 9:00-9:30 Arrival + breakfast \n \n 9:30-10:30 Keynote by Elizabeth Tipton\n
      \n
    • Integrating diverse evidence: From science to policymaking
    • \n
\n \n 10:30-11:00 Townhall discussion\n \n 11:00-12:00 Oral III\n
      \n
    • Oral III.1. Pragmatic Fairness: Developing Policies with Outcome Disparity Control
    • \n
    • Oral III.2. Dual Likelihood for Causal Inference under Structure Uncertainty
    • \n
    • Oral III.3. Causal State Distillation for Explainable Reinforcement Learning
    • \n
    • Oral III.4. Finding Alignments Between Interpretable Causal Variables and Distributed Neural Representations
    • \n
\n 12:00-14:00 Lunch break\n \n 14:00-15:00 Oral IV\n
      \n
    • Oral IV.1. Towards the Reusability and Compositionality of Causal Representations
    • \n
    • Oral IV.2. An Interventional Perspective on Identifiability in Gaussian LTI Systems with Independent Component Analysis
    • \n
    • Oral IV.3. Ensembled Prediction Intervals for Causal Outcomes Under Hidden Confounding
    • \n
\n 15:00-15:30 Coffee break\n \n 15:30-17:00 Poster III\n
      \n
    • Poster III.1. Towards the Identifiability of Comparative Deep Generative Models
    • \n
    • Poster III.2. Robustness of Algorithms for Causal Structure Learning to Hyperparameter Choice
    • \n
    • Poster III.3. Finding Alignments Between Interpretable Causal Variables and Distributed Neural Representations
    • \n
    • Poster III.4. DiConStruct: Causal Concept-based Explanations through Black-Box Distillation
    • \n
    • Poster III.5. Monitoring the performance of machine learning algorithms that induce feedback loops: what is the causal estimand?
    • \n
    • Poster III.6. Towards the Reusability and Compositionality of Causal Representations
    • \n
    • Poster III.7. Hyperparameter Tuning for Causal Inference with Double Machine Learning: A Simulation Study
    • \n
    • Poster III.8. Identifying Linearly-Mixed Causal Representations from Multi-Node Interventions
    • \n
    • Poster III.9. On the Lasso for Graphical Continuous Lyapunov Models
    • \n
    • Poster III.10. Causal Optimal Transport of Abstractions
    • \n
    • Poster III.11. An Interventional Perspective on Identifiability in Gaussian LTI Systems with Independent Component Analysis
    • \n
    • Poster III.12. Ensembled Prediction Intervals for Causal Outcomes Under Hidden Confounding
    • \n
    • Poster III.13. Low-Rank Approximation of Structural Redundancy for Self-Supervised Learning
    • \n
    • Poster III.14. Confounded Budgeted Causal Bandits
    • \n
    • Poster III.15. On the Impact of Neighbourhood Sampling to Satisfy Sufficiency and Necessity Criteria in Explainable AI
    • \n
\n 17:00-17:30 Wrap-up\n \n
    \n
    \n
    \n )\n}\n\nexport default FullAgenda2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst KeynoteSpeaker2023page = () => {\n return (\n
    \n Plenary Speakers\n \n\n - Aapo Hyvarinen, University of Helsinki\n Title: Causal discovery and latent-variable models \n

    Abstract: There is a deep connection between causal discovery and latent-variable models such as factor analysis, independent component analysis, and various unsupervised deep learning models. In several cases, estimation of a latent-variable model enables causal discovery, due to a kind of equivalence of the two in a purely observational regime. The key concept here is identifiability: We have to find a latent-variable model which is identifiable, i.e. the parameters of the model can be uniquely estimated. Quite often, the identifiability of a latent-variable model then leads to identifiability of a causal discovery model. In this talk, I will review research on this connection and the relevant identifiability theory.

\n\n\n - Miguel Hernan, CAUSALab / Biostatistics and Epidemiology, Harvard T.H. Chan School of Public Health \n Title: Causal AI for data analysis in the health sciences \n

    Abstract: The tools referred to as AI may assist, or replace, health researchers who learn from data. This talk describes a taxonomy of learning tasks in science and explores the relationship between two of them: prediction (pattern recognition) and counterfactual prediction (causal inference). Researchers predict counterfactually by using a combination of data and causal models of the world. In contrast, AI tools developed for prediction using only data are being increasingly used for counterfactual prediction. This raises questions about the origin of causal models, and the future of causal inference research in the health sciences.

    \n\n\n - Negar Kiyavash, École polytechnique fédérale de Lausanne \n Title: Causal Identification: Are We There Yet?\n

Abstract: We discuss causal identifiability, the canonical problem of causal inference, where the goal is to calculate the effect of intervening on a subset of variables on an outcome variable of interest. We first revisit the definition of the problem and note that it is necessary to add a positivity assumption on the observational distribution to the original definition, since without such an assumption the rules of do-calculus, and consequently the proposed algorithms in the field, are not sound. After discussing the state of the art and recent progress in the field, we present some of the open problems and remaining challenges.

    \n\n
    \n
    \n )\n}\n\nexport default KeynoteSpeaker2023page","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CallforPapers2024page = () => {\n return (\n
    \n Call for Papers\n\n

    We invite submissions to the 3rd Conference on Causal Learning and Reasoning (CLeaR), and welcome paper submissions that describe new theory, methodology, and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Accepted papers will be published in the Proceedings of Machine Learning Research (PMLR). \n

    \n\t
    Key dates
    \n\tThe planned dates are as follows:\n\t
      \n\t
    • Paper submission deadline: Oct 27, 2023 11:59pm (Anywhere on Earth, AoE)
    • \n\t
    • Reviews released: Dec 1, 2023
    • \n\t
    • Author rebuttals due: Dec 8, 2023 11:59pm (AoE)
    • \n\t
    • Final decisions: Jan 12, 2024
    • \n\t
    • Camera-ready deadline: Feb 20, 2024 11:59pm (AoE)
    • \n\t
    • Conference dates: Apr 1 (Mon) - 3 (Wed), 2024.
    • \n\t
    \n\t

    Submit at https://openreview.net/group?id=cclear.cc/CLeaR/2024/Conference.

    \n\n\t
    Summary
    \n\t

    Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to facilitate formulating, understanding, and tackling a broad range of problems, including domain generalization, robustness, trustworthiness, and fairness across machine learning, reinforcement learning, and statistics.

    \n\n\t

    We invite papers that describe new theory, methodology and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Submitted papers will be evaluated based on their novelty, technical quality, and potential impact. Experimental methods and results are expected to be reproducible, and authors are strongly encouraged to make code and data available. We also encourage submissions of proof-of-concept research that puts forward novel ideas and demonstrates potential for addressing problems at the intersection of causality and machine learning.

    \n\t\n\t
    Paper Submission
    \n\t

    The proceedings track is the standard CLeaR paper submission track. Papers will be selected via a rigorous double-blind peer-review process. All accepted papers will be presented at the Conference as contributed talks or as posters and will be published in the Proceedings.

    \n\t

    \n\tTopics of submission may include, but are not limited to:\n\t

      \n\t
    • Machine learning building on causal principles
    • \n\t
    • Causal discovery in complex environments
    • \n\t
    • Efficient causal discovery in large-scale datasets
    • \n\t
    • Causal effect identification and estimation
    • \n\t
    • Causal generative models for machine learning
    • \n\t
    • Unsupervised and semi-supervised deep learning connected to causality
    • \n\t
    • Machine learning with heterogeneous data sources
    • \n\t
• Benchmarks for causal discovery and causal reasoning
    • \n\t
    • Reinforcement learning
    • \n\t
    • Fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • \n\t
    • Applications of any of the above to real-world problems
    • \n\t
    • Foundational theories of causation
    • \n\t
    • Causal representation learning
    • \n\t
    \n\t

    \n\n\t
    Physical Attendance
    \n\t

The CLeaR 2024 organizing committee prioritizes the safety and health of our community. We are still considering the format of the CLeaR 2024 conference. It will preferably be held as a hybrid conference with no mandatory physical attendance, but we are also keeping a backup plan of making the conference fully virtual in case of a new pandemic situation. Once we reach a final decision, we will announce the format of the conference on the website. Thank you for your patience and understanding.

    \n\n\t
    Formatting and Supplementary Material
    \n\t

    Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open access archival version of the full paper reviewed for CLeaR. You can also submit a single file of additional supplementary material separately, which may be either a pdf file (containing proof details, for instance) or a zip file that can include multiple files of all formats (such as code or videos). Note that reviewers are under no obligation to examine the supplementary material.

    \n\n\t

    Please format the paper using the official LaTeX style files. We do not support submission in formats other than LaTeX. Please do not modify the layout given by the style file.

    \n\n\n\t
    Anonymization Requirements
    \n\t

The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. We use OpenReview to host papers; however, public discussions are not allowed during the review process. The review comments are only visible to program chairs, area chairs, and reviewers with submitted review comments. Papers will be desk-rejected if they contain any information that violates the double-blind reviewing policy, such as author names or affiliations, acknowledgements, or links from which an author’s identity or institution can be inferred. Self-citations are allowed as long as anonymity is preserved. It is up to the author’s discretion how best to preserve anonymity when including self-citations. Possibilities include: leaving out a self-citation, including it but replacing the citation text with “removed for anonymous submission,” or leaving the citation as-is. We recommend leaving in a moderate number of self-citations for published or otherwise well-known work.

    \n\n\t

    Revisions are allowed in the submission system until the paper submission deadline. Changes will not be allowed afterwards.

    \n\n\t

We strongly discourage advertising the preprint on social media or in the press while under submission to CLeaR. Preprints must not be explicitly identified as a CLeaR submission at any time during the review period (i.e., from the abstract submission deadline until the notification of the accept/reject decision).

    \n\n\t
    Dual Submissions
    \n\t

CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in, or submitted to, a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions to workshops or other non-archival venues (without proceedings) will not be considered as dual submissions. Submissions as extended abstracts with 5 pages or less will not be considered concurrent submissions either. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. Tech reports (including reports on sites such as arXiv) do not count as prior publication. It is acceptable to have a substantially extended version of the submitted paper under consideration simultaneously for journal publication, so long as the journal version’s planned publication date is after our publication (April 13, 2024, tentatively), it does not violate the journal's policy, the journal submission does not interfere with CLeaR's right to publish the paper, and the situation is clearly described at the time of CLeaR submission. Please describe the situation in the appropriate box on the submission page (and do not include author information in the submission itself, to avoid accidental unblinding). Authors are also allowed to give talks to restricted audiences on the work(s) submitted to CLeaR during the review.

    \n\n\t

    All accepted papers will be presented at the Conference either as contributed talks or as posters, and will be published in the CLeaR Conference Proceedings in the Journal of Machine Learning Research Workshop and Conference Proceedings series. Papers for talks and posters will be treated equally in publication.

    \n\n\t
    Confidentiality
    \n\t

The reviewers and area chairs will have access to the papers and supplementary materials that are assigned to them.

    \n\t\n\t

    The program chairs and workflow chairs will have access to all the papers. Everyone having access to papers and supplementary materials will be instructed to keep them confidential during the review process and delete them after the final decisions.

    \n\t\n\t

    Reviews will be visible to area chairs, program chairs, and workflow chairs throughout the process. At any stage of the process, author names will not be known to the reviewers or area chairs, but only visible to program chairs. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers.

    \n\n\t

    Vanessa Didelez & Francesco Locatello
    \n\tCLeaR 2024 Program Chairs

    \n\n\t

    Aditya Grover & Cheng Zhang
    \n\tCLeaR 2024 General Chairs

    \n
    \n )\n}\n\nexport default CallforPapers2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ACInstruction2024page = () => {\n return (\n
    \n AC Instructions\n

Thank you for serving as an area chair for CLeaR 2024. As an area chair, your job is to (1) ensure that all the submissions you are assigned have high-quality reviews and good discussions, and (2) write quality meta-reviews and make acceptance decisions. If you have any questions, please contact the program chairs at chairs.clear2024@gmail.com. The main tasks of area chairs are listed below.

    \n
    Main tasks
    \n
      \n
    1. Preparation (by Oct 28, 2023)
    2. \n
        \n
      • CLeaR 2024 is using the OpenReview System. Please create your OpenReview profile if you do not have one and make sure it is up to date if you already have an account.
      • \n
• Please read and agree to the CLeaR 2024 code of conduct and declare the relevant conflicts of interest.
      • \n
      • In addition to the guidelines below, please be familiar with the reviewer instructions. You will be interacting significantly with reviewers, so please make sure you understand what is expected of them.
      • \n
      \n
    3. Bid on papers (Oct 31, 2023 - Nov 4, 2023)
    4. \n
        \n
      • Log into OpenReview and bid on submissions that fall into your area of expertise. Your bidding is an important input to the overall matching results.
      • \n
      \n
    5. Check reviewer assignment (Nov 7, 2023 - Nov 9, 2023)
    6. \n
        \n
      • Make sure that every submission in your batch is matched with suitable reviewers before the reviewing process starts.
      • \n
      • If you notice a conflict of interest with a submission that is assigned to you, please contact the program chairs immediately so that the paper will be reassigned.
      • \n
      • You can invite extra reviewers, either in the existing pool or by inviting external reviewers, even after the reviewing process starts.
      • \n
      \n
    7. Make sure all papers have quality reviews (Nov 29, 2023 - Dec 2, 2023)
    8. \n
        \n
• Initial reviews are due Nov 29. You might need to send multiple reminder emails. If a reviewer is unable to deliver a review, please find a replacement reviewer who is able to do emergency reviews.
      • \n
      • Read all reviews carefully. If a review is substandard, you should ask the reviewer to improve their review.
      • \n
      \n
    9. Discuss with reviewers and authors (Dec 12, 2023 -- Dec 30, 2023)
    10. \n
        \n
      • As soon as the discussion period starts, initiate and lead a discussion via OpenReview for each submission, and make sure the reviewers engage in the discussion phase.
      • \n
      • Make sure your reviewers read and respond to all author responses.
      • \n
• A further discussion with the authors will be enabled during the discussion period. When posting a comment, please be careful about its visibility: whether it can be seen only by the reviewers or by both reviewers and authors.
      • \n
      \n
11. Make accept/reject decisions (Jan 2, 2024 - Jan 9, 2024)
    12. \n
        \n
      • Write a meta-review that explains your decision (accept or reject) to the authors. Your comments should augment the reviews, and explain how the reviews, author response, and discussion were used to arrive at your decision. Do not dismiss or ignore a review unless you have a good reason for doing so. If the reviewers cannot come to a consensus, you should read the paper carefully and write a detailed meta-review.
      • \n
      • If you cannot make a decision, please reach out to the program chairs at your earliest convenience.
      • \n
      \n
    \n \n

    * Please review the policies in the CLeaR 2024 Call for Papers and Reviewer Instructions.

    \n
    \n )\n}\n\nexport default ACInstruction2024page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CodeConduct2023page = () => {\n return (\n
    \n Code of Conduct\n
    Purpose
    \n

    At CLeaR 2023, our goal is to create a community and environment that recognizes and respects the intrinsic value of everyone, which is essential for the open exchange of ideas, freedom of thought and expression, and respectful scientific debate at the conference.

    \n
    Who
    \n

    All participants, including but not limited to organizers, reviewers, speakers, sponsors, and volunteers at our Conference and Conference-sponsored social events are required to agree with this Code of Conduct both during an event and on official communication channels, including social media. In particular, sponsors should not use sexual, racial, or other offensive images, events, or other materials. This code applies to both official sponsors and any organization that uses the conference name as a brand as part of its activities during or around the conference.

    \n
    Policy
    \n

CLeaR is committed to providing all participants with an experience free from harassment, bullying, discrimination, and retaliation. This includes offensive comments related to age, gender, gender identity and expression, race, sexual orientation, physical or intellectual disability, physical appearance, body type, ethnicity, religion, politics, technical choices, or any other personal characteristics. We have zero tolerance for bullying, intimidation, personal attacks, harassment, sustained interruption of conversations or other activities, and behavior that interferes with the full participation of other participants. This includes sexual harassment, stalking, harassing photography or recording, inappropriate physical contact, unwelcome sexual attention, vulgar communication, and demeaning remarks. The policies apply to actual meeting sites and conference venues, including physical venues, online venues, and official virtual platforms, including but not limited to OpenReview comments, video, virtual streaming, and Q&A tools. For example, offensive or even threatening comments on OpenReview are prohibited. Likewise, Zoom bombing or any virtual activity that is unrelated to the topic of discussion and detrimental to the purpose of the topic or program is not allowed.

    \n
    Action
    \n

Participants who are asked by any member of the community to stop any improper behavior defined here should immediately comply. Meeting organizers may take further action at their discretion, including: formally or informally warning offenders, expelling them from the meeting without refund, barring them from submitting to or participating in future CLeaR meetings, reporting the incident to the offender’s home institution or funding agency, or reporting the incident to local authorities or law enforcement. A ''just kidding'' response is unacceptable. If action is taken, an appeal procedure will be provided.

    \n
    Complaint reporting
    \n

If you have any concerns about a possible violation of these policies, please contact the conference chairs (chairs.clear2023@gmail.com) as soon as possible. Reports made during the conference will be responded to within 24 hours; those made at other times, within two weeks. Complaints and violations will be handled with discretion. We are ready and eager to help attendees contact relevant help services, escort them to a safe place, or otherwise help people who have been harassed feel safe during the meeting. We gratefully accept feedback from the CLeaR community on our policy and actions.

    \n

    A similar version has been used by other conferences, such as ICLR 2020 and UAI 2020.

    \n
    \n\n );\n};\n\nexport default CodeConduct2023page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CodeConduct2024page = () => {\n return (\n
    \n Conflicts of Interest\n

    This page defines what constitutes a conflict of interest for the CLeaR 2024 review process.

    \n

    If you are an author, reviewer, or area chair, please make sure to create or update your OpenReview profile. You will be asked to declare two types of conflicts---domain conflicts and personal conflicts. Both types are declared by filling out appropriate sections of your OpenReview profile, as described below.

    \n
      \n
    • Domain conflicts (entered in Education & Career History)
    • \n Please ensure that this section accurately represents your domain conflicts for (at least) the last three years. When you enter a domain conflict, none of your submissions will be visible to reviewers or area chairs who have also entered this domain conflict. Only the last three years of your and their Education & Career History will be used.\n
    • Personal conflicts (entered in Advisors, Relations & Conflicts)
    • \n You should also enter your personal conflicts, including (1) family or close personal relationship, (2) Ph.D. advisee/advisor relationship, and (3) current, frequent, or recent collaboration (including internships) within the past three years.\n If you have any questions about special circumstances not discussed above, please contact program chairs at chairs.clear2024@gmail.com.\n
    \n
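    A minimal sketch of the idea behind domain conflicts (an editorial illustration, not OpenReview's actual matching code; the profile shape and helper name are assumptions):

    // Each history entry is assumed to look like { domain: "cmu.edu", endYear: 2024 }.
    // Two people are in conflict if they share an institutional domain within the
    // last three years, mirroring the policy described above.
    const hasDomainConflict = (historyA, historyB, currentYear) => {
      const recentDomains = (history) =>
        new Set(
          history
            .filter(({ endYear }) => currentYear - endYear <= 3) // last three years only
            .map(({ domain }) => domain)
        );
      const domainsA = recentDomains(historyA);
      return [...recentDomains(historyB)].some((domain) => domainsA.has(domain));
    };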
    \n );\n};\n\nexport default CodeConduct2024page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CameraReady2023page = () => {\n return (\n
    \n Camera-ready Instructions\n

    TBD

    \n \n
    \n );\n};\n\nexport default CameraReady2023page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './RegistrationPage.scss';\n\nconst Registration2024page = () => {\n return (\n
    \n Venue and Registration\n
    Venue
    \n

    CLeaR 2024 will be held at Carnesale Commons, UC Los Angeles, California from April 1 to 3, 2024. We encourage participation in person, but the conference also includes virtual components.

    \n
    Registration
    \n

    You can register for CLeaR 2024 here. Registration will be open until March 15, 2024. As we have a limited number of available tickets, please register as soon as possible. Please contact us if you need an invitation letter for a visa application.

    \n
    \n )\n}\n\nexport default Registration2024page\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst PresentationInstruction2023page = () => {\n return (\n
    \n Presentation Instructions\n
    Instructions for orals:
    \n
      \n
    • Oral presentations are 17min + 3min for questions
    • \n
    • For every paper accepted as an oral, the authors should also prepare and present a poster
    • \n
    \n
    Instructions for posters:
    \n
      \n
    • The poster boards accommodate A0 in portrait orientation; please aim for a size between A1 and A0
    • \n
    \n
    Remote presentation:
    \n

    We highly recommend attending the conference in person, especially if you are presenting an accepted paper.

    \n

    Please contact us at zietld@amazon.com if you cannot present (oral or poster) in person (subject: [CleaR 2023 remote]).

    \n
    \n )\n}\n\nexport default PresentationInstruction2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ZoomLink = ({ topic, time, link, meetingId, passcode, localNumberLink }) => (\n
    \n
    {topic}
    \n

    Time: {time}

    \n

    Join Zoom Meeting:

    \n

    {link}

    \n

    Meeting ID: {meetingId}

    \n

    Passcode: {passcode}

    \n

    Find your local number

    \n
    \n);\n\n\nconst papers = [\n {\n title: \"Poster I.1. Sample-Specific Root Causal Inference with Latent Variables\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR_EEL_Poster.pdf\"\n },\n {\n title: \"Poster I.2. Causal Discovery for Non-stationary Non-linear Time Series Data Using Just-In-Time Modeling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023PosterComplete.pdf\"\n },\n {\n title: \"Poster I.3. Causal Models with Constraints\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR_poster-5.pdf\"\n },\n {\n title: \"Poster I.4. Non-parametric identifiability and sensitivity analysis of synthetic control models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023_%20Spotify-7.pdf\"\n },\n {\n title: \"Poster I.6. Estimating long-term causal effects from short-term experiments and long-term observational data with unobserved confounding\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023_Poster_VanGoffrier.pdf\"\n },\n {\n title: \"Poster I.7. Learning Causal Representations of Single Cells via Sparse Mechanism Shift Modeling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/Learning_Causal_Representations_of_Single_Cells_via_Sparse_Mechanism_Shift_Modeling.pdf\"\n },\n {\n title: \"Poster I.9. Instrumental Processes Using Integrated Covariances\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/posterI10.pdf\"\n },\n {\n title: \"Poster II.1. Branch-Price-and-Cut for Causal Discovery\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/poster_1.pdf\"\n },\n {\n title: \"Poster II.3. Beyond the Markov Equivalence Class: Extending Causal Discovery under Latent Confounding\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/poster_CLeaR_2023_final.pdf\"\n },\n {\n title: \"Poster II.4. Learning Conditional Granger Causal Temporal Networks\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Learning_Conditional_Granger_Causal_Temporal_Networks.pdf\"\n },\n {\n title: \"Poster II.5. Practical Algorithms for Orientations of Partially Directed Graphical Models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Poster_Practical_Algorithms_for_Orientations_of_Partially_Directed_Graphical_Models.pdf\"\n },\n {\n title: \"Poster II.6. Enhancing Causal Discovery from Robot Sensor Data in Dynamic Scenarios\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Postercopy.pdf\"\n },\n {\n title: \"Poster II.7. Leveraging Causal Graphs for Blocking in Randomized Experiments\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CleaR2023-poster.pdf\"\n },\n {\n title: \"Poster II.8. Generalizing Clinical Trials with Convex Hulls\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CLeaR_OCH_Poster.pdf\"\n },\n {\n title: \"Poster II.9. Backtracking Counterfactuals\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Poster_Backtracking_Counterfactuals.pdf\"\n },\n {\n title: \"Poster II.10. Stochastic Causal Programming for Bounding Treatment Effects\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CLeaR_poster_SCP.pdf\"\n },\n {\n title: \"Poster III.1. A Meta-Reinforcement Learning Algorithm for Causal Discovery\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/23_meta_rl_cd_clear_poster.pdf\"\n },\n {\n title: \"Poster III.2. 
Causal Inference Despite Limited Global Confounding via Mixture Models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR_2023_Poster_Vertical_Final.pdf\"\n },\n {\n title: \"Poster III.3. Causal Triplet: An Open Challenge for Intervention-centric Causal Representation Learning\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/poster_causal_triplet.pdf\"\n },\n {\n title: \"Poster III.4. Image-based Treatment Effect Heterogeneity\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/ImageHeterogeneity_clear.pdf\"\n },\n {\n title: \"Poster III.5. Causal Learning through Deliberate Undersampling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR_poster_Soloveva.pdf\"\n },\n {\n title: \"Poster III.6. Influence-Aware Attention for Multivariate Temporal Point Processes\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/Influence-Aware_Attention_for_Multivariate_Temporal_Point_Processes_CLeaR23.pdf\"\n },\n {\n title: \"Poster III.7. Evaluating Temporal Observation-Based Causal Discovery Techniques Applied to Road Driver Behaviour\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR23_causaldiscovery_poster.pdf\"\n },\n {\n title: \"Poster III.8. Directed Graphical Models and Causal Discovery for Zero-Inflated Data\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/ZeroDAG_poster_final.pdf\"\n },\n {\n title: \"Poster III.10. Local Dependence Graphs for Discrete Time Processes\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/clear-poster-niemiro_rajkowski.pdf\"\n },\n {\n title: \"Poster IV.1. On Discovery of Local Independence over Continuous Variables via Neural Contextual Decomposition\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CSSI_CLeaR2023_poster.pdf\"\n },\n {\n title: \"Poster IV.3. Can Active Sampling Reduce Causal Confusion in Offline Reinforcement Learning?\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CleaR.pdf\"\n },\n {\n title: \"Poster IV.4. Causal Inference with Non-IID Data under Model Uncertainty\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster_3.pdf\"\n },\n {\n title: \"Poster IV.5. Factual Observation Based Heterogeneity Learning for Counterfactual Prediction\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster.pdf\"\n },\n {\n title: \"Poster IV.6. On the Interventional Kullback-Leibler Divergence\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/On_the_IKL_div_poster.pdf\"\n },\n {\n title: \"Poster IV.7. Unsupervised Object Learning via Common Fate\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster_tangemann_et_al.pdf\"\n },\n {\n title: \"Poster IV.8. Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/Figueiredo_CleaR_2023_Poster.pdf\"\n },\n {\n title: \"Poster IV.9. Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster1.pdf\"\n },\n {\n title: \"Poster IV.10. 
Causal Abstraction with Soft Interventions\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CausalAbsractionWithSoftInterventions.pdf\"\n },\n];\n\nclass ZoomLinks extends React.Component {\n constructor(props) {\n super(props);\n this.state = {\n isAuthenticated: false,\n password: \"\",\n };\n this.handleSubmit = this.handleSubmit.bind(this);\n this.handleChange = this.handleChange.bind(this);\n }\n\n handleSubmit(event) {\n event.preventDefault();\n // This is a dummy password, replace this with your own password.\n const correctPassword = \"causality2023_p\";\n if (this.state.password === correctPassword) {\n this.setState({ isAuthenticated: true });\n } else {\n alert(\"Incorrect password\");\n }\n }\n\n handleChange(event) {\n this.setState({ password: event.target.value });\n }\n\n render() {\n if (this.state.isAuthenticated) {\n return (\n
    \n

    Registered participants can attend keynotes and oral presentations (see schedule) remotely. Please connect to this Zoom meeting:

    \n \n

    This link is used for all keynotes and orals. Note that in-person poster sessions, the townhall, panel discussion, and social activities will not be streamed.

    \n

    When participating remotely, please mute yourself (except when asking questions). Feel free to ask questions either in the chat or by raising your hand in Zoom and asking them over video.

    \n

    Virtual posters can be found below.

    \n \n \n \n \n \n \n \n \n {papers.map((paper) => (\n \n \n \n \n ))}\n \n
    Paper titleDownload link
    {paper.title}Download
    \n
    \n );\n } else {\n return (\n
    \n
    Enter password to access Zoom links and posters
    \n \n \n
    \n );\n }\n }\n}\n\n\n\nconst OnlineSchedulepage = () => {\n return (\n
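    // Editorial note on the password gate above: ZoomLinks compares a plain-text
    // password in client-side code, so the secret ships inside the JS bundle and is
    // readable by anyone who views the source. A minimal sketch of a slightly less
    // leaky variant (an assumption, not part of the original site): store only a
    // SHA-256 digest and compare digests using the Web Crypto API.
    const EXPECTED_DIGEST =
      "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"; // sha256("test"), illustrative only
    async function checkPassword(candidate) {
      const data = new TextEncoder().encode(candidate);
      const digest = await crypto.subtle.digest("SHA-256", data);
      const hex = Array.from(new Uint8Array(digest))
        .map((b) => b.toString(16).padStart(2, "0"))
        .join("");
      return hex === EXPECTED_DIGEST;
    }
    // Trade-off: this still only deters casual inspection; truly private links
    // require a server-side check.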
    \n Online Schedule\n \n
    \n )\n}\n\nexport default OnlineSchedulepage\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst SocialActivities2023page = () => {\n return (\n
    \n Social Activities\n\n

    Tue, 11 Apr: Bar Night

    \n Casual pub evening, starting from 20:00 at Freistil Taproom.\n
      \n
    • For those interested, meet earlier, at 18:30, at Neckarmüller Brewery for a beer and some dinner.
    • \n
    • They also serve a limited selection of plates at Freistil.
    • \n
    • Later in the evening, we may crawl to a couple of other bars in the center.
    • \n
    \n

    Wed, 12 Apr: Conference Dinner

    \n Official conference dinner, starting from 18:30 at Museum (upstairs from 1821).\n
      \n
    • 18:30--19:30: Reception with bar (drinks paid directly by participants)
    • \n
    • From 19:30: Buffet style dinner
    • \n
    • Dress code: wear whatever you are comfortable with
    • \n
    • You may need to pay for alcoholic beverages during dinner
    • \n
    \n

    Thu, 13 Apr: Hike

    \n Group hike to the neighbouring village Bebenhausen.\n
      \n
    • We will meet at and leave from the conference venue at 14:30.
    • \n
    • The destination is the 12th century monastery in Bebenhausen.
    • \n
    • We recommend wearing shoes suitable for hiking.
    • \n
    • Duration: ca 2.5h (45min to Bebenhausen, mostly downhill + time to walk around + 1h back, mostly uphill incl. a somewhat steep part).
    • \n
    • Whoever wants to stay and explore the monastery and the palace (tickets at their own cost) will need to find their own way back. \n The rest of the group will continue hiking on this trail.\n
    • \n
    • \n Recommendations: Download the Outdooractive app on your phone and save the trail. \n We plan to move as a group, but this helps in case someone gets separated.\n
    • \n
    \n If you have questions about any of the above activities, just ask the social co-chairs Atalanti and Julius during the conference.\n\n
    \n )\n}\n\nexport default SocialActivities2023page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst StudentVolunteers2023page = () => {\n return (\n
    \n Student Volunteers\n

    TBD

    \n
    \n )\n}\n\nexport default StudentVolunteers2023page\n","import React from \"react\";\nimport \"./DatesTable.scss\";\n\nconst DatesTable = ({ array }) => {\n return (\n \n \n \n \n \n \n \n \n {array.map(({ name, date }) => (\n \n \n \n \n ))}\n \n
    WhatWhen
    {name}{date}
    \n );\n};\n\nexport default DatesTable;\n","import React from 'react'\nimport DatesTable from './components/DateTable/DatesTable'\nimport Title from '../../../components/Title/Title'\nimport { importantDates2025 } from '../../../common/constants/importantDates'\n\nconst ImportantDates2025page = () => {\n return (\n <>\n Important Dates\n \n \n )\n}\n\nexport default ImportantDates2025page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
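    // Illustrative shape of the imported constant (an editorial assumption inferred
    // from the { name, date } destructuring in DatesTable above; the actual
    // common/constants/importantDates.js file is not shown here). The dates are the
    // planned dates listed in the CLeaR 2025 Call for Papers later in this document.
    export const importantDates2025 = [
      { name: "Paper submission deadline", date: "Nov 4, 2024, 11:59pm (AoE)" },
      { name: "Reviews released", date: "Dec 13, 2024" },
      { name: "Author rebuttals due", date: "Dec 23, 2024, 11:59pm (AoE)" },
      { name: "Final decisions", date: "Jan 27, 2025" },
      { name: "Camera-ready deadline", date: "Mar 9, 2025, 11:59pm (AoE)" },
      { name: "Conference dates", date: "May 7-9, 2025" },
    ];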
    \n {array.map(({title, author, filePath, description}) => (\n \n \n

    {title}

    \n {author}

    \n Download PDF\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","const acceptedDatasets = [\n {\n title: \"The Structurally Complex with Additive Parent Causality (SCARY) Dataset\",\n author:\n \"Jarry Chen, Haytham M. Fayek\",\n filePath: \"/2023/AcceptedDatasets/chen23a.pdf\",\n },\n {\n title: \"3DIdentBox: A Toolbox for Identifiability Benchmarking\",\n author:\n \"Alice Bizeul, Imant Daunhawer, Emanuele Palumbo, Bernhard Schölkopf, Alexander Marx, Julia E. Vogt\",\n filePath: \"/2023/AcceptedDatasets/bizeul23a.pdf\",\n },\n {\n title: \"Causal Benchmark Based on Disentangled Image Dataset\",\n author:\n \"Liyuan Xu, Arthur Gretton\",\n filePath: \"/2023/AcceptedDatasets/xu23a.pdf\",\n },\n {\n title: \"Synthetic Time Series: A Dataset for Causal Discovery\",\n author:\n \"Julio Munoz-Benıtez, L. Enrique Sucar\",\n filePath: \"/2023/AcceptedDatasets/munozbenitez23a.pdf\",\n },\n {\n title: \"SpaCE: The Spatial Confounding (Benchmarking) Environment\",\n author:\n \"Mauricio Tec, Ana Trisovic, Michelle Audirac, Francesca Dominici\",\n filePath: \"/2023/AcceptedDatasets/tec23a.pdf\",\n },\n {\n title: \"CausalEdu: a real-world education dataset for temporal causal discovery and inference\",\n author:\n \"Wenbo Gong, Digory Smith, Zichao Wang, Craig Barton, Simon Woodhead, Nick Pawlowski, Joel Jennings, Cheng Zhang\",\n filePath: \"/2023/AcceptedDatasets/gong23a.pdf\",\n },\n];\n\nexport default acceptedDatasets;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedDatasets from \"../../../common/constants/acceptedDatasets_2025\";\n\nconst AcceptedDatasets2025page = () => {\n return (\n //
    \n // Accepted Papers\n // TBD\n //
    \n
    \n Accepted Datasets\n \n
    \n\n );\n};\n\nexport default AcceptedDatasets2025page;\n","import React from \"react\";\nimport ShowMoreText from 'react-show-more-text';\nimport \"./PaperTable.scss\";\n\nconst PaperTable = ({ array }) => {\n return (\n
    \n {array.map(({title, author ,description}) => (\n \n \n

    {title}

    \n {author}\n {description}\n \n \n ))}\n
    \n );\n};\n\nexport default PaperTable;\n","const acceptedPapers = [\n {\n title: \"Generalizing Clinical Trials with Convex Hulls\",\n author:\n \"Eric Strobl, Thomas A Lasko\",\n },\n {\n title: \"Backtracking Counterfactuals\",\n author:\n \"Julius Von Kügelgen, Abdirisak Mohamed, Sander Beckers\",\n },\n {\n title: \"Stochastic Causal Programming for Bounding Treatment Effects\",\n author:\n \"Kirtan Padh, Jakob Zeitler, David Watson, Matt Kusner, Ricardo Silva, Niki Kilbertus\",\n },\n {\n title: \"Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model\",\n author:\n \"Mario A. T. Figueiredo, Catarina Oliveira\",\n },\n {\n title: \"Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions\",\n author:\n \"Fabio Massimo Zennaro, Máté Drávucz, Geanina Apachitei, W. Dhammika Widanage, Theodoros Damoulas\",\n },\n {\n title: \"Causal Abstraction with Soft Interventions\",\n author:\n \"Riccardo Massidda, Atticus Geiger, Thomas Icard, Davide Bacciu\",\n },\n {\n title: \"Directed Graphical Models and Causal Discovery for Zero-Inflated Data\",\n author:\n \"Shiqing Yu, Mathias Drton, Ali Shojaie\",\n }, \n {\n title: \"An Algorithm and Complexity Results for Causal Unit Selection\",\n author:\n \"Haiying Huang, Adnan Darwiche\",\n },\n];\n\nexport default acceptedPapers;\n","const acceptedPapers = [\n {\n title: \"Sample-Specific Root Causal Inference with Latent Variables\",\n author: \"Eric Strobl, Thomas A Lasko\",\n },\n {\n title: \"Causal Discovery for Non-stationary Non-linear Time Series Data Using Just-In-Time Modeling\",\n author: \"Daigo Fujiwara, Kazuki Koyama, Keisuke Kiritoshi, Tomomi Okawachi, Tomonori Izumitani, Shohei Shimizu\",\n },\n {\n title: \"Causal Models with Constraints\",\n author: \"Sander Beckers, Joseph Halpern, Christopher Hitchcock\",\n },\n {\n title: \"Non-parametric identifiability and sensitivity analysis of synthetic control models\",\n author: \"Jakob Zeitler, Athanasios Vlontzos, Ciarán Mark Gilligan-Lee\",\n },\n {\n title: \"Factorization of the Partial Covariance in Singly-Connected Path Diagrams\",\n author: \"Jose Peña\",\n },\n {\n title: \"Estimating long-term causal effects from short-term experiments and long-term observational data with unobserved confounding\",\n author: \"Graham Van Goffrier, Lucas Maystre, Ciarán Mark Gilligan-Lee\",\n },\n {\n title: \"Local Dependence Graphs for Discrete Time Processes\",\n author: \"Wojciech Niemiro, Łukasz Rajkowski\",\n },\n {\n title: \"Scalable Causal Discovery with Score Matching\",\n author: \"Francesco Montagna, Nicoletta Noceti, Lorenzo Rosasco, Kun Zhang, Francesco Locatello\",\n },\n {\n title: \"Causal Discovery with Score Matching on Additive Models with Arbitrary Noise\",\n author: \"Francesco Montagna, Nicoletta Noceti, Lorenzo Rosasco, Kun Zhang, Francesco Locatello\",\n },\n {\n title: \"Beyond the Markov Equivalence Class: Extending Causal Discovery under Latent Confounding\",\n author: \"Mirthe Maria Van Diepen, Ioan Gabriel Bucur, Tom Heskes, Tom Claassen\",\n },\n {\n title: \"Learning Conditional Granger Causal Temporal Networks\",\n author: \"Ananth Balashankar, Srikanth Jagabathula, Lakshmi Subramanian\",\n },\n {\n title: \"Learning Causal Representations of Single Cells via Sparse Mechanism Shift Modeling\",\n author: \"Romain Lopez, Natasa Tagasovska, Stephen Ra, Kyunghyun Cho, Jonathan Pritchard, Aviv Regev\",\n },\n {\n title: \"Branch-Price-and-Cut for Causal Discovery\",\n author: \"James Cussens\",\n },\n {\n 
title: \"Instrumental Processes Using Integrated Covariances\",\n author: \"Søren Wengel Mogensen\",\n },\n {\n title: \"A Meta-Reinforcement Learning Algorithm for Causal Discovery\",\n author: \"Andreas W.M. Sauter, Erman Acar, Vincent Francois-Lavet\",\n },\n {\n title: \"Causal Inference Despite Limited Global Confounding via Mixture Models\",\n author: \"Spencer L. Gordon, Bijan Mazaheri, Yuval Rabani, Leonard Schulman\",\n },\n {\n title: \"Causal Triplet: An Open Challenge for Intervention-centric Causal Representation Learning\",\n author: \"Yuejiang Liu, Alexandre Alahi, Chris Russell, Max Horn, Dominik Zietlow, Bernhard Schölkopf, Francesco Locatello\",\n },\n {\n title: \"Image-based Treatment Effect Heterogeneity\",\n author: \"Connor Thomas Jerzak, Fredrik Daniel Johansson, Adel Daoud\",\n },\n {\n title: \"Causal Learning through Deliberate Undersampling\",\n author: \"Kseniya Solovyeva, David Danks, Mohammadsajad Abavisani, Sergey Plis\",\n },\n {\n title: \"Influence-Aware Attention for Multivariate Temporal Point Processes\",\n author: \"Xiao Shou, Tian Gao, Dharmashankar Subramanian, Debarun Bhattacharjya, Kristin Bennett\",\n },\n {\n title: \"Evaluating Temporal Observation-Based Causal Discovery Techniques Applied to Road Driver Behaviour\",\n author: \"Rhys Peter Matthew Howard, Lars Kunze\",\n },\n {\n title: \"On Discovery of Local Independence over Continuous Variables via Neural Contextual Decomposition\",\n author: \"Inwoo Hwang, Yunhyeok Kwak, Yeon-Ji Song, Byoung-Tak Zhang, Sanghack Lee\",\n },\n {\n title: \"Local Causal Discovery for Estimating Causal Effects\",\n author: \"Shantanu Gupta, David Childers, Zachary Chase Lipton\",\n },\n {\n title: \"Can Active Sampling Reduce Causal Confusion in Offline Reinforcement Learning?\",\n author: \"Gunshi Gupta, Tim G. J. 
Rudner, Rowan Thomas McAllister, Adrien Gaidon, Yarin Gal\",\n },\n {\n title: \"Causal Inference with Non-IID Data under Model Uncertainty\",\n author: \"Chi Zhang, Karthika Mohan, Judea Pearl\",\n },\n {\n title: \"Factual Observation Based Heterogeneity Learning for Counterfactual Prediction\",\n author: \"Hao Zou, Haotian Wang, Renzhe Xu, Bo Li, Jian Pei, Ye Jun Jian, Peng Cui\",\n },\n {\n title: \"On the Interventional Kullback-Leibler Divergence\",\n author: \"Jonas Bernhard Wildberger, Siyuan Guo, Arnab Bhattacharyya, Bernhard Schölkopf\",\n },\n {\n title: \"Unsupervised Object Learning via Common Fate\",\n author: \"Matthias Tangemann, Steffen Schneider, Julius Von Kügelgen, Francesco Locatello, Peter Vincent Gehler, Thomas Brox, Matthias Kuemmerer, Matthias Bethge, Bernhard Schölkopf\",\n },\n {\n title: \"Practical Algorithms for Orientations of Partially Directed Graphical Models\",\n author: \"Malte Luttermann, Marcel Wienöbst, Maciej Liskiewicz\",\n },\n {\n title: \"Enhancing Causal Discovery from Robot Sensor Data in Dynamic Scenarios\",\n author: \"Luca Castri, Sariah Mghames, Marc Hanheide, Nicola Bellotto\",\n },\n {\n title: \"Leveraging Causal Graphs for Blocking in Randomized Experiments\",\n author: \"Abhishek Kumar Umrawal\",\n },\n];\n\nexport default acceptedPapers;\n","const acceptedPapers = [\n {\n title: \"Backtracking Counterfactuals\",\n author:\n \"Julius Von Kügelgen, Abdirisak Mohamed, Sander Beckers\",\n },\n];\n\nexport default acceptedPapers;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport PaperTable from \"./components/PaperTable/PaperTable\";\nimport acceptedPapers_oral from \"../../../common/constants/acceptedPapers_oral2025\";\nimport acceptedPapers_poster from \"../../../common/constants/acceptedPapers_poster2025\";\nimport acceptedPapers_best from \"../../../common/constants/acceptedPapers_best2025\";\nconst AcceptedPapers2025page = () => {\n return (\n //
    \n // Accepted Papers\n // TBD\n //
    \n
    \n Accepted Papers\n
    Best Paper award
    \n \n
    Oral
    \n \n
    Poster
    \n \n
    \n );\n};\n\nexport default AcceptedPapers2025page;\n","import React from \"react\";\nimport \"./OCTable.scss\";\n\nconst OCTable = ({ array }) => {\n return (\n \n {array.map(({ position, persons }) => (\n \n \n \n ))}\n
    \n

    {position}

    \n {persons.map(({ name, href, address }) => (\n
    \n \n {name}\n \n
    \n {address}\n
    \n ))}\n
    \n );\n};\n\nexport default OCTable;\n","import React from 'react'\nimport Title from '../../../components/Title/Title'\nimport OCTable from './components/OCTable/OCTable'\nimport { array2025 } from '../../../common/constants/organizingCommittee'\n\n\nconst OrganizingCommittee2025page = () => {\n return (\n
    \n Organizing Committee\n \n
    \n )\n}\n\nexport default OrganizingCommittee2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ReviewerInstruction2025page = () => {\n return (\n
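    // Illustrative shape of array2025 (an editorial assumption inferred from the
    // destructuring in OCTable above): entries group people by role, and each
    // person carries a homepage link and affiliation. The affiliation below is
    // taken from the keynote listing elsewhere in this document; the href is a
    // placeholder.
    export const array2025 = [
      {
        position: "General Chairs",
        persons: [
          { name: "Negar Kiyavash", href: "https://example.org", address: "École polytechnique fédérale de Lausanne" },
        ],
      },
    ];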
    \n Reviewer Instructions\n

    Thank you for agreeing to review for CLeaR 2025! Your assessment is vital to creating a high-quality program. This page provides review guidelines to help you write reviews efficiently and effectively.

    \n
    Main tasks
    \n
      \n
    1. Preparation (date TBA)
    2. \n
        \n
      • CLeaR 2025 is using the OpenReview System. Please create your OpenReview profile if you do not have one and make sure it is up to date if you already have an account.
      • \n
      • Reviewer invitations will be sent via noreply@openreview.net. Please accept the reviewer invitation before the expiry date.
      • \n
    • Please read and agree to the CLeaR 2025 code of conduct and declare any relevant conflicts of interest.
      • \n
      \n
    3. Paper bidding and assignments checking (date TBA)
    4. \n
        \n
      • Please bid on the papers that fall into your area of expertise. Your bidding is an important input to the overall matching results.
      • \n
      • Please check the assigned papers right after the paper assignment. If you do not feel qualified to review a paper or find potential conflicts of interest, please communicate with your AC as soon as possible.
      • \n
      \n
    5. Write thorough and timely reviews (date TBA)
    6. \n
        \n
    • Please make your review as deep and detailed as possible. Superficial reviews are of little help in making final decisions. It is also important to treat each submission fairly and provide unbiased reviews.
      • \n
    • A review form has been designed to facilitate the review process. Please refer to the “Review Form” section for step-by-step instructions on how to answer each question in the review form.
      • \n
      \n
    7. Discuss with authors/fellow reviewers/ACs (date TBA)
    8. \n
        \n
      • Before the start of discussions, please carefully read author responses with an open mind to avoid possible misunderstandings. Even if the author's rebuttal does not change your opinion, please acknowledge that you have read and considered it.
      • \n
    • A further discussion with the authors will be enabled during the discussion period. If you would like the authors to clarify anything further after reading the rebuttal, you can discuss with them on the paper’s page.
      • \n
      • All reviewers should actively participate in discussions with fellow reviewers and ACs to have a more comprehensive understanding of each paper. The discussions are especially important for borderline papers and papers with high variance assessments. While engaging in the discussion, please be professional, polite, and keep an open mind. Although full consensus makes the final decision easier, it is not mandatory in the reviewing process, as different people may have different perspectives.
      • \n
      • If you change your opinion during or after the discussion phase, please update your ratings and give specific reasons in the final comments.
      • \n
      \n
    \n
    Review form
    \n
      \n
    1. Summary. Summarize the main contributions of each paper. The contributions may be new problems, theories, methods, algorithms, applications, benchmarks, etc.
    2. \n
    3. Main review. Please provide an in-depth review of each paper by considering the following aspects:
    4. \n
        \n
      • Originality: Does the paper provide anything new, like a new problem or a new method? Is the novelty compared to existing works well justified? Is it possible that similar ideas have been studied but the paper does not cite them properly?
      • \n
      • Significance: Does the paper address an important problem? How relevant are the results to the CLeaR community? Does the proposed theory or method significantly advance the state-of-the-art? Do the results in the paper provide new insights to the research problem? Is this paper likely to have broad impacts outside the CLeaR community, e.g., in natural/social science or engineering?
      • \n
      • Technical quality: Is the proposed approach technically sound? Are claims substantiated by theoretical and/or empirical results? Are the derivations and proofs correct? Is the proposed method unnecessarily complicated? Are the hyperparameters tuned in an appropriate manner?
      • \n
    • Clarity: Is the submission clearly written and well organized? Is the take-home message easily extractable from the paper? Is the motivation well explained by illustrations and examples? Are the technical details described rigorously? Are there a significant number of typos that make the paper hard to read?
      • \n
      \n
    5. Overall score. We use a 10-point scoring system for the overall assessment. Please select the category that best describes your assessment of the paper.
    6. \n
        \n
      • 10: Top 5% of accepted papers, seminal paper
      • \n
      • 9: Top 15% of accepted papers, strong accept
      • \n
      • 8: Top 50% of accepted papers, clear accept
      • \n
      • 7: Good paper, accept
      • \n
      • 6: Marginally above acceptance threshold
      • \n
      • 5: Marginally below acceptance threshold
      • \n
      • 4: Ok but not good enough - rejection
      • \n
      • 3: Clear rejection
      • \n
      • 2: Strong rejection
      • \n
      • 1: Trivial or wrong
      • \n
      \n
    7. Confidence score. Please select the category that best describes your confidence in the assessment of the submission.
    8. \n
        \n
      • 5: You are absolutely certain about your assessment. You are very familiar with the related work and checked the math/other details carefully.
      • \n
      • 4: You are confident in your assessment, but not absolutely certain. It is unlikely, but not impossible, that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work.
      • \n
      • 3: You are fairly confident in your assessment. It is possible that you did not understand some parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • \n
      • 2: You are willing to defend your assessment, but it is quite likely that you did not understand central parts of the submission or that you are unfamiliar with some pieces of related work. Math/other details were not carefully checked.
      • \n
      • 1: Your assessment is an educated guess. The submission is not in your area or the submission was difficult to understand. Math/other details were not carefully checked.
      • \n
      \n
    \n
    Policies
    \n

    Confidentiality. By reviewing for CLeaR 2025, you agree to keep all material and information related to the review confidential. In particular, you must not use ideas and results from submitted papers in your own research or distribute them to others. You should delete all reviewing material, such as submitted code, at the end of the reviewing cycle. You should not talk about submissions, or content related to their review, with anyone without prior approval from the program chairs.

    \n

    Double-blind reviewing. The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. However, author names will be visible to area chairs and program chairs. Authors are responsible for anonymizing their submissions. Submissions may not contain any identifying information that may violate the double-blind reviewing policy. If you are assigned a submission that is not adequately anonymized, please contact the corresponding AC. Also, you should not attempt to find out the identities of authors for any of your assigned submissions, e.g., by searching arXiv preprints. Reviewer names are visible to the area chair (and program chairs), but the reviewers will not know names of other reviewers. Please do not disclose your identity to authors and fellow reviewers in the discussions.

    \n

    Dual Submissions. CLeaR does not allow dual submissions. Namely, submissions should not have been previously published in, or submitted to, a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions as extended abstracts (5 pages or less), or to workshops or non-archival venues (without proceedings), will not be considered concurrent submissions. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it. If you suspect that a submission assigned to you is a dual submission, or if you require further clarification, please contact the corresponding AC. Please see the Call for Papers for more information about dual submissions.

    \n

    Violations of formatting instructions. Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open-access archival version of the full paper reviewed for CLeaR. If you are assigned a paper that is overlength or appears to violate the CLeaR proceedings format (e.g., by decreasing margins or font size, or by removing pre-defined spacing), please notify the corresponding AC immediately.

    \n\n

    * Please also review the policies in the CLeaR 2025 Call for Papers.

    \n
    \n )\n}\n\nexport default ReviewerInstruction2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport OCTable from '../OrganizingCommitteePage/components/OCTable/OCTable'\nimport {array ,array1} from '../../../common/constants/advisoryboard'\n\nconst AdvisoryBoard2025page = () => {\n return (\n
    \n Advisory Board\n \n \n \n \n \n
    \n
    \n )\n}\n\nexport default AdvisoryBoard2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './SponsorsPage.scss';\n\nconst Sponsors2025page = () => {\n return (\n
    \n Sponsors\n
    \n

    TBA

    \n
    \n
    \n )\n}\n\nexport default Sponsors2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst AreaChair2025page = () => {\n return (\n
    \n Area Chairs\n Bo Han (Hong Kong Baptist University)
    \n David Danks (University of California, San Diego)
    \n Elina Robeva (University of British Columbia)
    \n Emre Kıcıman (Microsoft Research)
    \n Francesco Locatello (Institute of Science and Technology Austria)
    \n Jiji Zhang (Chinese University of Hong Kong)
    \n Kun Zhang (Carnegie Mellon University)
    \n Linbo Wang (University of Toronto)
    \n Mingming Gong (University of Melbourne)
    \n Niels Richard Hansen (University of Copenhagen)
    \n Niki Kilbertus (Technical University of Munich)
    \n Peter Spirtes (Carnegie Mellon University)
    \n Qingyuan Zhao (University of Cambridge)
    \n Razieh Nabi (Emory University)
    \n Ricardo Silva (University College London)
    \n Sara Magliacane (University of Amsterdam)
    \n Shohei Shimizu (Shiga University)
    \n Sofia Triantafillou (University of Crete)
    \n Tom Claassen (Radboud University Nijmegen)
    \n Tongliang Liu (University of Sydney)
    \n
    \n )\n}\n\nexport default AreaChair2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ProgramCommittee2025page = () => {\n return (\n
    \n Program Committee\n\n TBD\n
    \n )\n}\n\nexport default ProgramCommittee2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport DatesTable from \"../ImportantDatesPage/components/DateTable/DatesTable\";\nimport { importantDates2025 } from \"../../../common/constants/importantDates\";\nimport \"./CLeaR2025Page.scss\";\n\nconst CLeaR2025page = () => {\n return (\n
    \n CLeaR (Causal Learning and Reasoning) 2025\n
    \n

    \n Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to be able to facilitate formulating, understanding, and tackling a number of hard machine learning problems in transfer learning, reinforcement learning, and deep learning.\n

    \n

    \n We invite submissions to the 4th conference on Causal Learning and Reasoning (CLeaR),\n and welcome paper submissions that describe new theory, methodology, and/or applications relevant to any aspect of causal\n learning and reasoning in the fields of artificial intelligence and statistics. Submitted papers will be evaluated based\n on their novelty, technical quality, and potential impact. Experimental methods and results are expected to be reproducible,\n and authors are strongly encouraged to make code and data available.\n We also encourage submissions of proof-of-concept research that puts forward novel ideas and demonstrates potential for\n addressing problems at the intersection of causality and machine learning.\n

    \n

    CLeaR 2025 will be held in Lausanne, Switzerland, May 7 to May 9, 2025.

    \n Topics of submission may include, but are not limited to:\n
      \n
    • Machine learning building on causal principles
    • \n
    • Causal discovery in complex environments
    • \n
    • Efficient causal discovery in large-scale datasets
    • \n
    • Causal effect identification and estimation
    • \n
    • Causal generative models for machine learning
    • \n
    • Unsupervised and semi-supervised deep learning connected to causality
    • \n
    • Machine learning with heterogeneous data sources
    • \n
    • Benchmarks for causal discovery and causal reasoning
    • \n
    • Reinforcement learning
    • \n
    • Fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • \n
    • Applications of any of the above to real-world problems
    • \n
    • Causal representation learning
    • \n
    • Causal inference in philosophy and psychology
    • \n
    \n

    The program at the conference will highlight new applications of causality to real-world problems, benchmark datasets,\n or benchmark methodologies.

    \n
    \n

    \n
    \n
    Important Dates
    \n \n
    \n
    \n );\n};\n\nexport default CLeaR2025page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst FullAgenda2025page = () => {\n return (\n
    \n Full Agenda\n

    Below please find the schedule of the CLeaR 2025 conference:

    \n\n
    Day 1 (May 7, Wednesday)
    \n \n \n 08:45-09:15: Registration\n \n \n 09:15-09:30: Welcome\n \n \n 09:30-10:30: Keynote by David Blei\n {/*
      \n
    • \n
    */}\n \n \n 10:30-11:00: Coffee break\n \n \n 11:00-12:15: Oral Session I\n
      \n
    • Oral I.1. Algorithmic Syntactic Causal Identification
    • \n
    • Oral I.2. The Probability of Tiered Benefit: Partial Identification with Robust and Stable Inference
    • \n
    • Oral I.3. Stabilized Inverse Probability Weighting via Isotonic Calibration
    • \n
    \n \n \n 12:15-14:00: Lunch break\n \n \n 14:00-15:30: Poster Session I\n \n
      \n
    • Poster I.1. Algorithmic Syntactic Causal Identification
    • \n
    • Poster I.2. Contagion Effect Estimation Using Proximal Embeddings
    • \n
    • Poster I.3. Matchings, Predictions and Counterfactual Harm in Refugee Resettlement Processes
    • \n
    • Poster I.4. Automatic Debiasing of Neural Networks via Moment Constrained Learning
    • \n
    • Poster I.5. Non-Parametric Conditional Independence Testing for Mixed Continuous-Categorical Variables: A Novel Method and Numerical Evaluation
    • \n
    • Poster I.6. Encode-Decoder-Based GAN for Estimating Counterfactual Outcomes Under Sequential Selection Bias and Combinatorial Explosion
    • \n
    • Poster I.7. Interpretable Neural Causal Models with TRAM-DAGs
    • \n
    • Poster I.8. Bounds and Sensitivity Analysis of the Causal Effect Under Outcome-Independent MNAR Confounding
    • \n
    • Poster I.9. Disparate Effect of Missing Mediators on Transportability of Causal Effects
    • \n
    • Poster I.10. Optimizing Multi-Scale Representations to Detect Effect Heterogeneity Using Earth Observation and Computer Vision: Applications to Two Anti-Poverty RCTs
    • \n
    • Poster I.11. Local Interference: Removing Interference Bias in Semi-Parametric Causal Models
    • \n
    • Poster I.12. The Probability of Tiered Benefit: Partial Identification with Robust and Stable Inference
    • \n
    • Poster I.13. Probably Approximately Correct High Dimensional Causal Effect Estimation Given a Valid Adjustment Set
    • \n
    • Poster I.14. Stabilized Inverse Probability Weighting via Isotonic Calibration
    • \n
    • Poster I.15. Beyond Flatland: A Geometric Take on Matching Methods for Treatment Effect Estimation
    • \n
    • Poster I.16. Network Causal Effect Estimation in Graphical Models of Contagion and Latent Confounding
    • \n
    • Poster I.17. Your Assumed DAG is Wrong and Here's How to Deal with It
    • \n
    • Poster I.18. Causal Drivers of Dynamic Networks
    • \n
    • Poster I.19. Compositional Models for Estimating Causal Effects
    • \n
    • Poster I.20. Causal Identification in Time Series Models
    • \n
    \n \n 15:30-16:00: Coffee break\n \n \n 16:00-17:15: Oral Session II\n
      \n
    • Oral II.1. Causal Bandits Without Graph Learning
    • \n
    • Oral II.2. Combining Causal Models for More Accurate Abstractions of Neural Networks
    • \n
    • Oral II.3. Algorithmic Causal Structure Emerging Through Compression
    • \n
    \n \n
    \n\n
    \n
    Day 2 (May 8, Thursday)
    \n \n \n 09:15-10:15: Keynote by Erin Gabriel\n {/*
      \n
    • \n
    */}\n \n \n 10:15-10:45: Coffee break\n \n \n 10:45-12:15: Poster Session II\n \n
      \n
    • Poster II.1. Fair Clustering: A Causal Perspective
    • \n
    • Poster II.2. Beyond Single-Feature Importance with ICECREAM
    • \n
    • Poster II.3. Actual Causation and Nondeterministic Causal Models
    • \n
    • Poster II.4. Aligning Graphical and Functional Causal Abstractions
    • \n
    • Poster II.5. Transfer Learning in Latent Contextual Bandits with Covariate Shift Through Causal Transportability
    • \n
    • Poster II.6. Causal Bandits Without Graph Learning
    • \n
    • Poster II.7. Counterfactual Influence in Markov Decision Processes
    • \n
    • Poster II.8. Omitted Labels Induce Nontransitive Paradoxes in Causality
    • \n
    • Poster II.9. The Causal-Effect Score in Data Management
    • \n
    • Poster II.10. Inducing Causal Structure Applied to Glucose Prediction for T1DM Patients
    • \n
    • Poster II.11. Combining Causal Models for More Accurate Abstractions of Neural Networks
    • \n
    • Poster II.12. Counterfactual Explanability of Black-Box Prediction Models
    • \n
    • Poster II.13. MXMap: A Multivariate Cross Mapping Framework for Causal Discovery in Dynamical Systems
    • \n
    • Poster II.14. Counterfactual Token Generation in Large Language Models
    • \n
    • Poster II.15. Algorithmic Causal Structure Emerging Through Compression
    • \n
    • Poster II.16. On Measuring Intrinsic Causal Attributions in Deep Neural Networks
    • \n
    • Poster II.17. Relational Object-Centric Actor-Critic
    • \n
    • Poster II.18. Extending Structural Causal Models for Autonomous Vehicles to Simplify Temporal System Construction & Enable Dynamic Interactions Between Agents
    • \n
    \n \n 12:15-14:00: Lunch break\n \n \n 14:00-15:30: Oral Session III\n
      \n
    • Oral III.1. Causal Reasoning in Difference Graphs
    • \n
    • Oral III.2. An Asymmetric Independence Model for Causal Discovery on Path Spaces
    • \n
    • Oral III.3. Scalable Causal Structure Learning via Amortized Conditional Independence Testing
    • \n
    \n \n \n 15:30-16:00: Coffee break\n \n \n 16:00-16:30: Town hall discussion\n \n \n 16:30-18:00: Transition to social event\n \n \n 18:00: Meet at Aquatis (Lausanne aquarium)\n \n \n 18:15-19:15: Aquarium visit\n \n \n 19:15-22:00: Buffet dinner\n \n
    \n\n
    \n
    Day 3 (May 9, Friday)
    \n \n \n 09:15-10:15: Keynote by Elias Bareinboim\n {/*
      \n
    • \n
    */}\n \n \n 10:15-10:45: Coffee break\n \n \n 10:45-12:15: Poster Session III\n \n
      \n
    • Poster III.1. Causal Reasoning in Difference Graphs
    • \n
    • Poster III.2. Shapley-PC: Constraint-Based Causal Structure Learning with a Shapley Inspired Framework
    • \n
    • Poster III.3. Robust Multi-View Co-Expression Network Inference
    • \n
    • Poster III.4. The CausalBench Challenge: A Machine Learning Contest for Gene Network Inference from Single-Cell Perturbation Data
    • \n
    • Poster III.5. Score Matching Through the Roof: Linear, Nonlinear, and Latent Variables Causal Discovery
    • \n
    • Poster III.6. Exact Discovery is Polynomial for Certain Sparse Causal Bayesian Networks
    • \n
    • Poster III.7. Cross-Validating Causal Discovery via Leave-One-Variable-Out
    • \n
    • Poster III.8. The Interventional Bayesian Gaussian Equivalent Score for Bayesian Causal Inference with Unknown Soft Interventions
    • \n
    • Poster III.9. The Landscape of Causal Discovery Data: Grounding Causal Discovery in Real-World Applications
    • \n
    • Poster III.10. An Asymmetric Independence Model for Causal Discovery on Path Spaces
    • \n
    • Poster III.11. AGM-TE: Approximate Generative Model Estimator of Transfer Entropy for Causal Discovery
    • \n
    • Poster III.12. Controlling for Discrete Unmeasured Confounding in Nonlinear Causal Models
    • \n
    • Poster III.13. Constraint-Based Causal Discovery with Tiered Background Knowledge and Latent Variables in Single or Overlapping Datasets
    • \n
    • Poster III.14. Sample Complexity of Nonparametric Closeness Testing for Continuous Distributions and Its Application to Causal Discovery with Hidden Confounding
    • \n
    • Poster III.15. Multi-Domain Causal Discovery in Bijective Causal Models
    • \n
    • Poster III.16. Selecting Accurate Subgraphical Models from Possibly Inaccurate Graphical Models
    • \n
    • Poster III.17. Scalable Causal Structure Learning via Amortized Conditional Independence Testing
    • \n
    • Poster III.18. Temporal Inverse Probability Weighting for Causal Discovery in Controlled Before-After Studies: Discovering ADEs in Generics
    • \n
    • Poster III.19. Unitless Unrestricted Markov-Consistent SCM Generation: Better Benchmark Datasets for Causal Discovery
    • \n
    • Poster III.20. Nondeterministic Causal Models
    • \n
    \n \n 12:15-12:30: Wrap up\n \n
    \n
    \n
    \n )\n}\n\nexport default FullAgenda2025page;","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst KeynoteSpeaker2023page = () => {\n return (\n
    \n Plenary Speakers\n \n\n - Aapo Hyvarinen, University of Helsinki\n Title: Causal discovery and latent-variable models \n

    Abstract: There is a deep connection between causal discovery and latent-variable models such as factor analysis, independent component analysis, and various unsupervised deep learning models. In several cases, estimation of a latent-variable model enables causal discovery, due to a kind of equivalence of the two in a purely observational regime. The key concept here is identifiability: We have to find a latent-variable model which is identifiable, i.e. the parameters of the model can be uniquely estimated. Quite often, the identifiability of a latent-variable model then leads to identifiability of a causal discovery model. In this talk, I will review research on this connection and the relevant identifiability theory.

    \n\n\n - Miguel Hernan, CAUSALab / Biostatistics and Epidemiology, Harvard T.H. Chan School \n Title: Causal AI for data analysis in the health sciences \n

    Abstract: The tools referred to as AI may assist, or replace, health researchers who learn from data. This talk describes a taxonomy of learning tasks in science and explores the relationship between two of them: prediction (pattern recognition) and counterfactual prediction (causal inference). Researchers predict counterfactually by using a combination of data and causal models of the world. In contrast, AI tools developed for prediction using only data are being increasingly used for counterfactual prediction. This raises questions about the origin of causal models, and the future of causal inference research in the health sciences.

    \n\n\n - Negar Kiyavash, École polytechnique fédérale de Lausanne \n Title: Causal Identification: Are We There Yet?\n

    Abstract: We discuss causal identifiability, the canonical problem of causal inference, where the goal is to calculate the effect of intervening on a subset of variables on an outcome variable of interest. We first revisit the definition of the problem and note that it is necessary to add a positivity assumption on the observational distribution to the original definition, as without such an assumption the rules of do-calculus, and consequently the algorithms proposed in the field, are not sound. After discussing the state of the art and recent progress in the field, we present some of the open problems and remaining challenges.

    \n\n
    \n
    \n )\n}\n\nexport default KeynoteSpeaker2023page","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CallforPapers2025page = () => {\n return (\n
    \n Call for Papers\n\n

    We invite submissions to the 4th Conference on Causal Learning and Reasoning (CLeaR), and welcome paper submissions that describe new theory, methodology, and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Accepted papers will be published in the Proceedings of Machine Learning Research (PMLR).\n

    \n
    Key dates
    \n The planned dates are as follows:\n
      \n
    • Paper submission deadline: Nov 4, 2024, 11:59pm (Anywhere on Earth, AoE; extended from Nov 2, 2024)
    • \n
    • Reviews released: Dec 13, 2024
    • \n
    • Author rebuttals due: Dec 23, 2024, 11:59pm (AoE; extended from Dec 20, 2024)
    • \n
    • Final decisions: Jan 27, 2025
    • \n
    • Camera-ready deadline: Mar 9, 2025 11:59pm (AoE)
    • \n
    • Conference dates: May 7 (Wed) - 9 (Fri), 2025.
    • \n
    \n

    Submit at https://openreview.net/group?id=cclear.cc/CLeaR/2025/Conference.

    \n\n
    Summary
    \n

    Causality is a fundamental notion in science and engineering. In the past few decades, some of the most influential developments in the study of causal discovery, causal inference, and the causal treatment of machine learning have resulted from cross-disciplinary efforts. In particular, a number of machine learning and statistical analysis techniques have been developed to tackle classical causal discovery and inference problems. On the other hand, the causal view has been shown to facilitate formulating, understanding, and tackling a broad range of problems, including domain generalization, robustness, trustworthiness, and fairness across machine learning, reinforcement learning, and statistics.\n

    \n

    We invite papers that describe new theory, methodology and/or applications relevant to any aspect of causal learning and reasoning in the fields of artificial intelligence and statistics. Submitted papers will be evaluated based on their novelty, technical quality, and potential impact. Experimental methods and results are expected to be reproducible, and authors are strongly encouraged to make code and data available. We also encourage submissions of proof-of-concept research that puts forward novel ideas and demonstrates potential for addressing problems at the intersection of causality and machine learning.\n Paper Submission\n The proceedings track is the standard CLeaR paper submission track. Papers will be selected via a rigorous double-blind peer-review process. All accepted papers will be presented at the Conference as contributed talks or as posters and will be published in the Proceedings.\n

    \n\n

    Topics of submission may include, but are not limited to:

    \n
      \n
    • Foundational theories of causation
    • \n
    • Causal effect identification and estimation
    • \n
    • Causal discovery in complex environments
    • \n
    • Efficient causal discovery in large-scale datasets
    • \n
    • Causal representation learning
    • \n
    • Machine learning (including reinforcement learning) building on causal principles
    • \n
    • Unsupervised and semi-supervised deep learning connected to causality
    • \n
    • Causal generative models for machine learning
    • \n
    • Machine learning and statistical methods for heterogeneous data sources
    • \n
    • Causality-empowered foundation models
    • \n
    • Causally rooted methods for fairness, accountability, transparency, explainability, trustworthiness, and recourse
    • \n
    • Benchmarks for causal discovery and causal reasoning
    • \n
    • Applications of any of the above to real-world problems
    • \n
    \n\n
    Physical Attendance
    \n

    CLeaR 2025 is being planned as an in-person conference with hybrid elements accommodating online presentations when physical attendance is not possible.

    \n\n
    Formatting and Supplementary Material
    \n

    Submissions are limited to 12 single-column PMLR-formatted pages, plus unlimited additional pages for references and appendices. Authors of accepted papers will have the option of opting out of the proceedings in favor of a 1-page extended abstract, which will point to an open access archival version of the full paper reviewed for CLeaR. You can also submit a single file of additional supplementary material separately, which may be either a pdf file (containing proof details, for instance) or a zip file that can include multiple files of all formats (such as code or videos). Note that reviewers are under no obligation to examine the supplementary material.

    \n\n

    Please format the paper using the official LaTeX style files. We do not support submission in formats other than LaTeX. Please do not modify the layout given by the style file.

    \n\n\n
    Anonymization Requirements
    \n

    The CLeaR review process is double-blind: reviewers and authors will both stay anonymous to each other during the review process. We use OpenReview to host papers; however, public discussions are not allowed during the review process. The review comments are only visible to program chairs, area chairs, and reviewers who have submitted their own review comments. Papers will be desk-rejected if they contain any information that violates the double-blind reviewing policy, such as author names or affiliations, acknowledgements, or links from which an author’s identity or institution can be inferred. Self-citations are allowed as long as anonymity is preserved. It is up to the authors’ discretion how best to preserve anonymity when including self-citations. Possibilities include: leaving out a self-citation, including it but replacing the citation text with “removed for anonymous submission,” or leaving the citation as-is. We recommend leaving in a moderate number of self-citations for published or otherwise well-known work.

    \n\n

    Revisions are allowed in the submission system until the paper submission deadline. Changes will not be allowed afterwards.

    \n\n

    We strongly discourage advertising the preprint on social media or in the press while under submission to CLeaR. Preprints must not be explicitly identified as a CLeaR submission at any time during the review period (i.e., from the abstract submission deadline until the notification of the accept/reject decision).

    \n\n
    Dual Submissions
    \n

    CLeaR does not allow dual submissions: submissions must not have been previously published in, or be under review at, a journal or the proceedings of another conference at any point during the CLeaR review process. Submissions to workshops or other non-archival venues (without proceedings) are not considered dual submissions, nor are extended abstracts of 5 pages or less. Authors may submit anonymized work to CLeaR that is already available as a preprint (e.g., on arXiv) without citing it; tech reports (including reports on sites such as arXiv) do not count as prior publication. It is acceptable to have a substantially extended version of the submitted paper under consideration simultaneously for journal publication, so long as the journal version’s planned publication date falls after our publication date (tentatively July 15, 2025), the arrangement does not violate the journal’s policy, the journal submission does not interfere with CLeaR’s right to publish the paper, and the situation is clearly described at the time of CLeaR submission. Please describe the situation in the appropriate box on the submission page (and do not include author information in the submission itself, to avoid accidental unblinding). Authors may also give talks to restricted audiences on work submitted to CLeaR during the review period.

    \n\n

    All accepted papers will be presented at the conference, either as contributed talks or as posters, and will be published in the CLeaR Conference Proceedings in the Journal of Machine Learning Research Workshop and Conference Proceedings series. Papers presented as talks and papers presented as posters will be treated equally in publication.

    \n\n
    Confidentiality
    \n

    Reviewers and area chairs will have access to the papers and supplementary materials assigned to them.

    \n\n

    The program chairs and workflow chairs will have access to all papers. Everyone with access to papers and supplementary materials will be instructed to keep them confidential during the review process and to delete them after the final decisions.

    \n\n

    Reviews will be visible to area chairs, program chairs, and workflow chairs throughout the process. At no stage of the process will author names be known to reviewers or area chairs; they are visible only to program chairs. Reviewer names are visible to the area chair (and program chairs), but reviewers will not know the names of other reviewers.

    \n\n

    Mathias Drton & Biwei Huang
    \n CLeaR 2025 Program Chairs

    \n\n

    Negar Kiyavash & Jin Tian
    \n CLeaR 2025 General Chairs

    \n\n\n
    \n )\n}\n\nexport default CallforPapers2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ACInstruction2025page = () => {\n return (\n
    \n AC Instructions\n

    Thank you for serving as an area chair for CLeaR 2025. As an area chair, your job is to (1) ensure that all the submissions you are assigned have high-quality reviews and good discussions, and (2) write quality meta-reviews and make acceptance decisions. If you have any questions, please contact the program chairs at chairs.clear2025@gmail.com. The main tasks of area chairs are listed below.

    \n
    Main tasks
    \n
      \n
    1. Preparation (date TBA)
    \n
        \n
      • CLeaR 2025 is using the OpenReview System. Please create your OpenReview profile if you do not have one and make sure it is up to date if you already have an account.
      • \n
      • Please read and agree to the CLeaR 2025 code of conduct and declare any relevant conflicts of interest.
      • \n
      • In addition to the guidelines below, please familiarize yourself with the reviewer instructions. You will be interacting significantly with reviewers, so please make sure you understand what is expected of them.
      • \n
      \n
    2. Bid on papers (date TBA)
    \n
        \n
      • Log into OpenReview and bid on submissions that fall into your area of expertise. Your bidding is an important input to the overall matching results.
      • \n
      \n
    3. Check reviewer assignment (date TBA)
    \n
        \n
      • Make sure that every submission in your batch is matched with suitable reviewers before the reviewing process starts.
      • \n
      • If you notice a conflict of interest with a submission that is assigned to you, please contact the program chairs immediately so that the paper will be reassigned.
      • \n
      • You can invite extra reviewers, either from the existing pool or by inviting external reviewers, even after the reviewing process starts.
      • \n
      \n
    4. Make sure all papers have quality reviews (date TBA)
    \n
        \n
      • Initial reviews are due by the review deadline (date TBA). You might need to send multiple reminder emails. If a reviewer is unable to deliver a review, please find a replacement reviewer who is able to do emergency reviews.
      • \n
      • Read all reviews carefully. If a review is substandard, you should ask the reviewer to improve their review.
      • \n
      \n
    5. Discuss with reviewers and authors (date TBA)
    \n
        \n
      • As soon as the discussion period starts, initiate and lead a discussion via OpenReview for each submission, and make sure the reviewers engage in the discussion phase.
      • \n
      • Make sure your reviewers read and respond to all author responses.
      • \n
      • Further discussion with the authors will be enabled during the discussion period. When posting a comment, please take care over who can see it: only the reviewers, or both reviewers and authors.
      • \n
      \n
    6. Make accept/reject decisions (date TBA)
    \n
        \n
      • Write a meta-review that explains your decision (accept or reject) to the authors. Your comments should augment the reviews, and explain how the reviews, author response, and discussion were used to arrive at your decision. Do not dismiss or ignore a review unless you have a good reason for doing so. If the reviewers cannot come to a consensus, you should read the paper carefully and write a detailed meta-review.
      • \n
      • If you cannot make a decision, please reach out to the program chairs at your earliest convenience.
      • \n
      \n
    \n\n

    * Please review the policies in the CLeaR 2025 Call for Papers and Reviewer Instructions.

    \n
    \n )\n}\n\nexport default ACInstruction2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CodeConduct2025page = () => {\n return (\n
    \n Code of Conduct\n
    Purpose
    \n

    At CLeaR 2025, our goal is to create a community and environment that recognizes and respects the intrinsic value of everyone, which is essential for the open exchange of ideas, freedom of thought and expression, and respectful scientific debate at the conference.

    \n
    Who
    \n

    All participants, including but not limited to organizers, reviewers, speakers, sponsors, and volunteers at our conference and conference-sponsored social events, are required to abide by this Code of Conduct, both during events and on official communication channels, including social media. In particular, sponsors should not use sexual, racial, or otherwise offensive images, events, or other materials. This code applies both to official sponsors and to any organization that uses the conference name as a brand as part of its activities during or around the conference.

    \n
    Policy
    \n

    CLeaR is committed to providing all participants with an experience free from harassment, bullying, discrimination, and retaliation. This includes offensive comments related to age, gender, gender identity and expression, race, sexual orientation, physical or intellectual disability, physical appearance, body type, ethnicity, religion, politics, technical choices, or any other personal characteristics. There is no tolerance for bullying, intimidation, personal attacks, harassment, sustained disruption of conversations or other activities, or behavior that interferes with the full participation of other participants. This includes sexual harassment, stalking, harassing photography or recording, inappropriate physical contact, unwelcome sexual attention, vulgar communication, and demeaning characterizations. These policies apply to the actual meeting sites and conference venues, both physical and online, as well as official virtual platforms, including but not limited to OpenReview comments, video, virtual streaming, and Q&A tools. For example, offensive or threatening comments in OpenReview are prohibited. Likewise, Zoom bombing or any virtual activity that is unrelated to the topic of discussion and detrimental to the purpose of the topic or program is not allowed.

    \n
    Action
    \n

    Participants who are asked by any member of the community to stop any improper behavior defined here should comply immediately. Meeting organizers may take further action at their discretion, including: formally or informally warning the offender, expelling the offender from the meeting without a refund, barring submission to or participation in future CLeaR meetings, or reporting the incident to the offender’s institution or funding agency, or to local authorities or law enforcement. A “just kidding” response is unacceptable. If action is taken, an appeal procedure will be provided.

    \n
    Complaint reporting
    \n

    If you have any concerns about a possible violation of these policies, please contact the conference chairs (chairs.clear2025@gmail.com) as soon as possible. Reports made during the conference will be responded to within 24 hours; reports made at other times, within two weeks. Complaints and violations will be handled with discretion. We are ready and eager to help attendees contact relevant help services, escort them to a safe place, or otherwise help those who have been harassed feel safe during the meeting. We gratefully accept feedback from the CLeaR community on our policy and actions.

    \n

    A similar version has been used by other conferences, such as ICLR 2020 and UAI 2020.

    \n
    \n\n );\n};\n\nexport default CodeConduct2025page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst CoI2025page = () => {\n return (\n
    \n Conflicts of Interest\n

    This page defines what constitutes a conflict of interest for the CLeaR 2025 review process.

    \n

    If you are an author, reviewer, or area chair, please make sure to create or update your OpenReview profile. You will be asked to declare two types of conflicts---domain conflicts and personal conflicts. Both types are declared by filling out appropriate sections of your OpenReview profile, as described below.

    \n
      \n
    • Domain conflicts (entered in Education & Career History)
    • \n Please ensure that this section accurately represents your domain conflicts for (at least) the last three years. When you enter a domain conflict, none of your submissions will be visible to reviewers or area chairs who have also entered this domain conflict. Only the last three years of Education & Career History (yours and theirs) will be used.\n
    • Personal conflicts (entered in Advisors, Relations & Conflicts)
    • \n You should also enter your personal conflicts, including (1) family or close personal relationships, (2) Ph.D. advisee/advisor relationships, and (3) current, frequent, or recent collaborations (including internships) within the past three years.\n If you have any questions about special circumstances not discussed above, please contact the program chairs at chairs.clear2025@gmail.com.\n
    \n
    \n );\n};\n\nexport default CoI2025page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\n\nconst CameraReady2025page = () => {\n return (\n
    \n Camera-ready Instructions\n

    TBD

    \n\n
    \n );\n};\n\nexport default CameraReady2025page;\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\nimport './RegistrationPage.scss';\n\nconst Registration2025page = () => {\n return (\n
    \n Venue and Registration\n
    Venue
    \n

    CLeaR 2025 will be held at the SwissTech Convention Center, Lausanne, Switzerland, on May 7-9, 2025.\n

    \n
    Registration
    \n

    You can register for CLeaR 2025 through this form. As the number of available tickets is limited, please register as soon as possible. Please indicate whether you need an invitation letter for a visa application (at the bottom of the registration form).

    \n
    Hotel Accommodations
    \n

    Below, please find preferential rates for accommodation.

    \n
      \n
    • \n SwissTech Hotel (on the conference site)\n

      To reserve a room: send an email to reception@sthotel.ch, with the following information:

      \n
        \n
      • Last name, first name
      • \n
      • Exact arrival and departure dates
      • \n
      • Number of persons
      • \n
      • Breakfast: yes or no (surcharge of CHF 16.--)
      • \n
      \n

      Reservation code: CLEAR25

      \n

      Room price: CHF 130.--

      \n

      Breakfast: CHF 16.--/person

      \n

      Tourist taxes: CHF 5.50/person

      \n
    • \n
    • \n Starling Hotel Lausanne (about 10-min walk from the conference)\n

      To reserve a room before April 6, 2025: http://bookings.ihotelier.com/bookings.jsp?groupID=4475883&hotelID=73897

      \n

      To reserve a room from April 7, 2025 onwards: send an email to s.mesnil@shlausanne.ch with the code CLEAR25 and group ID 4475883

      \n

      Room price per person: CHF 170.--

      \n

      Supplement for double occupancy: CHF 35.--

      \n

      Breakfast buffet and parking included

      \n

      Tourist taxes: CHF 6.--/person

      \n
    • \n
    • \n Hotel Continental Lausanne (in front of the Lausanne train station)\n

      To reserve a room: send an email to booking@hotelcontinental.ch, with the following information:

      \n
        \n
      • Last name, first name
      • \n
      • Birth date
      • \n
      • Type and number of identity document
      • \n
      • Credit card number (for guarantee)
      • \n
      • Exact arrival and departure dates
      • \n
      • Number of persons
      • \n
      \n

      Reservation code: CLEAR25

      \n

      Room price: CHF 195.--

      \n

      Breakfast: included

      \n

      Bus and metro tickets: included

      \n

      Tourist taxes: CHF 6.--/person

      \n

      Baggage fee: CHF 4.--/bag (if needed)

      \n
    • \n
    • \n Hotel Moxy Lausanne City (town center, close to a lively and fashionable district)\n

      To reserve a room: use this link to book at the EPFL group rate until March 26, 2025 (after this date, the regular market price applies). On arrival, you will be asked to provide a credit card number as a guarantee.

      \n

      Room price single room: CHF 190.--

      \n

      Room price double room: CHF 205.--

      \n

      Breakfast: included

      \n

      Tourist taxes: CHF 5.50/person

      \n
    • \n
    \n
    \n )\n}\n\nexport default Registration2025page\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst PresentationInstruction2025page = () => {\n return (\n
    \n Presentation Instructions\n
    Instructions for orals:
    \n
      \n
    • Oral presentations are 17 minutes, plus 3 minutes for questions
    • \n
    • Every paper accepted as oral should also prepare and present a poster
    • \n
    \n
    Instructions for posters:
    \n
      \n
    • The poster boards fit A0 in portrait orientation; please aim for a size between A1 and A0
    • \n
    \n
    Remote presentation:
    \n

    We highly recommend attending the conference in person, especially to present your accepted papers.

    \n

    Please contact us at zietld@amazon.com if you cannot present your oral or poster in person (subject: [CLeaR 2025 remote]).

    \n
    \n )\n}\n\nexport default PresentationInstruction2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst ZoomLink = ({ topic, time, link, meetingId, passcode, localNumberLink }) => (\n
    \n
    {topic}
    \n

    Time: {time}

    \n

    Join Zoom Meeting:

    \n

    {link}

    \n

    Meeting ID: {meetingId}

    \n

    Passcode: {passcode}

    \n

    Find your local number

    \n
    \n);\n\n\nconst papers = [\n {\n title: \"Poster I.1. Sample-Specific Root Causal Inference with Latent Variables\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR_EEL_Poster.pdf\"\n },\n {\n title: \"Poster I.2. Causal Discovery for Non-stationary Non-linear Time Series Data Using Just-In-Time Modeling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023PosterComplete.pdf\"\n },\n {\n title: \"Poster I.3. Causal Models with Constraints\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR_poster-5.pdf\"\n },\n {\n title: \"Poster I.4. Non-parametric identifiability and sensitivity analysis of synthetic control models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023_%20Spotify-7.pdf\"\n },\n {\n title: \"Poster I.6. Estimating long-term causal effects from short-term experiments and long-term observational data with unobserved confounding\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/CLeaR2023_Poster_VanGoffrier.pdf\"\n },\n {\n title: \"Poster I.7. Learning Causal Representations of Single Cells via Sparse Mechanism Shift Modeling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/Learning_Causal_Representations_of_Single_Cells_via_Sparse_Mechanism_Shift_Modeling.pdf\"\n },\n {\n title: \"Poster I.9. Instrumental Processes Using Integrated Covariances\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day1/posterI10.pdf\"\n },\n {\n title: \"Poster II.1. Branch-Price-and-Cut for Causal Discovery\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/poster_1.pdf\"\n },\n {\n title: \"Poster II.3. Beyond the Markov Equivalence Class: Extending Causal Discovery under Latent Confounding\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/poster_CLeaR_2023_final.pdf\"\n },\n {\n title: \"Poster II.4. Learning Conditional Granger Causal Temporal Networks\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Learning_Conditional_Granger_Causal_Temporal_Networks.pdf\"\n },\n {\n title: \"Poster II.5. Practical Algorithms for Orientations of Partially Directed Graphical Models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Poster_Practical_Algorithms_for_Orientations_of_Partially_Directed_Graphical_Models.pdf\"\n },\n {\n title: \"Poster II.6. Enhancing Causal Discovery from Robot Sensor Data in Dynamic Scenarios\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Postercopy.pdf\"\n },\n {\n title: \"Poster II.7. Leveraging Causal Graphs for Blocking in Randomized Experiments\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CleaR2023-poster.pdf\"\n },\n {\n title: \"Poster II.8. Generalizing Clinical Trials with Convex Hulls\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CLeaR_OCH_Poster.pdf\"\n },\n {\n title: \"Poster II.9. Backtracking Counterfactuals\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/Poster_Backtracking_Counterfactuals.pdf\"\n },\n {\n title: \"Poster II.10. Stochastic Causal Programming for Bounding Treatment Effects\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day2/CLeaR_poster_SCP.pdf\"\n },\n {\n title: \"Poster III.1. A Meta-Reinforcement Learning Algorithm for Causal Discovery\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/23_meta_rl_cd_clear_poster.pdf\"\n },\n {\n title: \"Poster III.2. 
Causal Inference Despite Limited Global Confounding via Mixture Models\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR_2023_Poster_Vertical_Final.pdf\"\n },\n {\n title: \"Poster III.3. Causal Triplet: An Open Challenge for Intervention-centric Causal Representation Learning\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/poster_causal_triplet.pdf\"\n },\n {\n title: \"Poster III.4. Image-based Treatment Effect Heterogeneity\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/ImageHeterogeneity_clear.pdf\"\n },\n {\n title: \"Poster III.5. Causal Learning through Deliberate Undersampling\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR_poster_Soloveva.pdf\"\n },\n {\n title: \"Poster III.6. Influence-Aware Attention for Multivariate Temporal Point Processes\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/Influence-Aware_Attention_for_Multivariate_Temporal_Point_Processes_CLeaR23.pdf\"\n },\n {\n title: \"Poster III.7. Evaluating Temporal Observation-Based Causal Discovery Techniques Applied to Road Driver Behaviour\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/CLeaR23_causaldiscovery_poster.pdf\"\n },\n {\n title: \"Poster III.8. Directed Graphical Models and Causal Discovery for Zero-Inflated Data\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/ZeroDAG_poster_final.pdf\"\n },\n {\n title: \"Poster III.10. Local Dependence Graphs for Discrete Time Processes\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day3/clear-poster-niemiro_rajkowski.pdf\"\n },\n {\n title: \"Poster IV.1. On Discovery of Local Independence over Continuous Variables via Neural Contextual Decomposition\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CSSI_CLeaR2023_poster.pdf\"\n },\n {\n title: \"Poster IV.3. Can Active Sampling Reduce Causal Confusion in Offline Reinforcement Learning?\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CleaR.pdf\"\n },\n {\n title: \"Poster IV.4. Causal Inference with Non-IID Data under Model Uncertainty\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster_3.pdf\"\n },\n {\n title: \"Poster IV.5. Factual Observation Based Heterogeneity Learning for Counterfactual Prediction\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster.pdf\"\n },\n {\n title: \"Poster IV.6. On the Interventional Kullback-Leibler Divergence\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/On_the_IKL_div_poster.pdf\"\n },\n {\n title: \"Poster IV.7. Unsupervised Object Learning via Common Fate\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster_tangemann_et_al.pdf\"\n },\n {\n title: \"Poster IV.8. Distinguishing Cause from Effect on Categorical Data: The Uniform Channel Model\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/Figueiredo_CleaR_2023_Poster.pdf\"\n },\n {\n title: \"Poster IV.9. Jointly Learning Consistent Causal Abstractions Over Multiple Interventional Distributions\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/poster1.pdf\"\n },\n {\n title: \"Poster IV.10. 
Causal Abstraction with Soft Interventions\",\n downloadLink: \"https://www.cclear.cc/2023/Posters/Day4/CausalAbsractionWithSoftInterventions.pdf\"\n },\n];\n\nclass ZoomLinks extends React.Component {\n constructor(props) {\n super(props);\n this.state = {\n isAuthenticated: false,\n password: \"\",\n };\n this.handleSubmit = this.handleSubmit.bind(this);\n this.handleChange = this.handleChange.bind(this);\n }\n\n handleSubmit(event) {\n event.preventDefault();\n // This is a dummy password, replace this with your own password.\n const correctPassword = \"causality2023_p\";\n if (this.state.password === correctPassword) {\n this.setState({ isAuthenticated: true });\n } else {\n alert(\"Incorrect password\");\n }\n }\n\n handleChange(event) {\n this.setState({ password: event.target.value });\n }\n\n render() {\n if (this.state.isAuthenticated) {\n return (\n
    \n

    Registered participants can attend keynotes and oral presentations (see schedule) remotely. Please connect to this Zoom meeting:

    \n \n

    This link is used for all keynotes and orals. Note that in-person poster sessions, the town hall, the panel discussion, and social activities will not be streamed.

    \n

    When participating remotely, please mute yourself (except when asking questions). Feel free to ask questions either in the chat or by raising your hand in Zoom and asking them over video.

    \n

    Virtual posters can be found below.

    \n \n \n \n \n \n \n \n \n {papers.map((paper) => (\n \n \n \n \n ))}\n \n
    Paper titleDownload link
    {paper.title}Download
    \n
    \n );\n } else {\n return (\n
    \n
    Enter password to access Zoom links and posters
    \n \n \n
    \n );\n }\n }\n}\n\n\n\nconst OnlineSchedulepage = () => {\n return (\n
    \n Online Schedule\n \n
    \n )\n}\n\nexport default OnlineSchedulepage\n\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst SocialActivities2025page = () => {\n return (\n
    \n Social Activities\n\n

    Tue, 11 Apr: Bar Night

    \n Casual pub evening, starting from 20:00 at Freistil Taproom.\n
      \n
    • For those interested, we will meet earlier, at 18:30, at Neckarmüller Brewery for a beer and some dinner.
    • \n
    • Freistil also serves a limited selection of dishes.
    • \n
    • Later in the evening, we may crawl to a couple of other bars in the center.
    • \n
    \n

    Wed, 12 Apr: Conference Dinner

    \n Official conference dinner, starting from 18:30 at Museum (upstairs from 1821).\n
      \n
    • 18:30--19:30: Reception with bar (drinks paid directly by participants)
    • \n
    • From 19:30: Buffet style dinner
    • \n
    • Dress code: wear whatever you are comfortable with
    • \n
    • You may need to pay for alcoholic beverages during dinner
    • \n
    \n

    Thu, 13 Apr: Hike

    \n Group hike to the neighbouring village Bebenhausen.\n
      \n
    • We will meet at and leave from the conference venue at 14:30.
    • \n
    • The destination is the 12th century monastery in Bebenhausen.
    • \n
    • We recommend wearing shoes suitable for hiking.
    • \n
    • Duration: ca 2.5h (45min to Bebenhausen, mostly downhill + time to walk around + 1h back, mostly uphill incl. a somewhat steep part).
    • \n
    • Those who want to stay and explore the monastery and the palace (tickets at their own cost) will need to find their own way back.\n Everyone else will continue hiking on this trail.\n
    • \n
    • \n Recommendation: download the Outdooractive app on your phone and save the trail.\n We plan to move as a group, but this helps in case someone gets separated.\n
    • \n
    \n If you have questions about any of the above activities, just ask the social co-chairs Atalanti and Julius during the conference.\n\n
    \n )\n}\n\nexport default SocialActivities2025page\n","import React from \"react\";\nimport Title from \"../../../components/Title/Title\";\n\nconst StudentVolunteers2025page = () => {\n return (\n
    \n Student Volunteers\n

    TBD

    \n
    \n )\n}\n\nexport default StudentVolunteers2025page\n","import React from 'react'\nimport { Switch, Route, Redirect } from 'react-router'\n\nimport ImportantDates2022page from '../../pages/2022/ImportantDatesPage'\nimport AcceptedPapers2022page from '../../pages/2022/AcceptedPapersPage'\nimport OrganizingCommittee2022page from '../../pages/2022/OrganizingCommitteePage'\nimport ReviewerInstruction2022page from '../../pages/2022/ReviewerInstructionPage'\nimport AdvisoryBoard2022page from '../../pages/2022/AdvisoryBoardPage'\nimport Sponsors2022page from '../../pages/2022/SponsorsPage'\nimport AreaChair2022page from '../../pages/2022/AreaChairPage'\nimport ProgramCommittee2022page from '../../pages/2022/ProgramCommitteePage'\nimport CLeaR2022Page from '../../pages/2022/CLeaRPage'\nimport FullAgenda2022page from '../../pages/2022/FullAgendaPage'\nimport KeynoteSpeaker2022page from '../../pages/2022/KeynoteSpeakerPage'\nimport CallforPapers2022page from '../../pages/2022/CallforPapersPage'\nimport ACInstruction2022page from '../../pages/2022/ACInstructionPage'\nimport CodeofConduct2022page from '../../pages/2022/CodeConductPage'\nimport CoI2022page from '../../pages/2022/CoIPage'\nimport CameraReady2022page from '../../pages/2022/CameraReady'\nimport Registration2022 from '../../pages/2022/RegistrationPage'\n\nimport ImportantDates2023page from '../../pages/2023/ImportantDatesPage'\nimport AcceptedDatasets2023page from '../../pages/2023/AcceptedDatasetsPage'\nimport AcceptedPapers2023page from '../../pages/2023/AcceptedPapersPage'\nimport OrganizingCommittee2023page from '../../pages/2023/OrganizingCommitteePage'\nimport ReviewerInstruction2023page from '../../pages/2023/ReviewerInstructionPage'\nimport AdvisoryBoard2023page from '../../pages/2023/AdvisoryBoardPage'\nimport Sponsors2023page from '../../pages/2023/SponsorsPage'\nimport AreaChair2023page from '../../pages/2023/AreaChairPage'\nimport ProgramCommittee2023page from '../../pages/2023/ProgramCommitteePage'\nimport CLeaR2023Page from '../../pages/2023/CLeaRPage'\nimport FullAgenda2023page from '../../pages/2023/FullAgendaPage'\nimport KeynoteSpeaker2023page from '../../pages/2023/KeynoteSpeakerPage'\nimport CallforPapers2023page from '../../pages/2023/CallforPapersPage'\nimport CallforDatasets2023page from '../../pages/2023/CallforDatasetsPage'\nimport ACInstruction2023page from '../../pages/2023/ACInstructionPage'\nimport CodeofConduct2023page from '../../pages/2023/CodeConductPage'\nimport CoI2023page from '../../pages/2023/CoIPage'\nimport CameraReady2023page from '../../pages/2023/CameraReady'\nimport Registration2023 from '../../pages/2023/RegistrationPage'\nimport PresentationInstruction2023page from '../../pages/2023/PresentationInstructionPage'\nimport OnlineSchedule2023page from '../../pages/2023/OnlineSchedulePage'\nimport SocialActivities2023page from '../../pages/2023/SocialActivitiesPage'\nimport StudentVolunteers2023page from '../../pages/2023/StudentVolunteersPage'\n\nimport ImportantDates2024page from '../../pages/2024/ImportantDatesPage'\nimport AcceptedDatasets2024page from '../../pages/2024/AcceptedDatasetsPage'\nimport AcceptedPapers2024page from '../../pages/2024/AcceptedPapersPage'\nimport OrganizingCommittee2024page from '../../pages/2024/OrganizingCommitteePage'\nimport ReviewerInstruction2024page from '../../pages/2024/ReviewerInstructionPage'\nimport AdvisoryBoard2024page from '../../pages/2024/AdvisoryBoardPage'\nimport Sponsors2024page from '../../pages/2024/SponsorsPage'\nimport AreaChair2024page from 
'../../pages/2024/AreaChairPage'\nimport ProgramCommittee2024page from '../../pages/2024/ProgramCommitteePage'\nimport CLeaR2024Page from '../../pages/2024/CLeaRPage'\nimport FullAgenda2024page from '../../pages/2024/FullAgendaPage'\nimport KeynoteSpeaker2024page from '../../pages/2024/KeynoteSpeakerPage'\nimport CallforPapers2024page from '../../pages/2024/CallforPapersPage'\nimport ACInstruction2024page from '../../pages/2024/ACInstructionPage'\nimport CodeofConduct2024page from '../../pages/2024/CodeConductPage'\nimport CoI2024page from '../../pages/2024/CoIPage'\nimport CameraReady2024page from '../../pages/2024/CameraReady'\nimport Registration2024 from '../../pages/2024/RegistrationPage'\nimport PresentationInstruction2024page from '../../pages/2024/PresentationInstructionPage'\nimport OnlineSchedule2024page from '../../pages/2024/OnlineSchedulePage'\nimport SocialActivities2024page from '../../pages/2024/SocialActivitiesPage'\nimport StudentVolunteers2024page from '../../pages/2024/StudentVolunteersPage'\n\n\nimport ImportantDates2025page from '../../pages/2025/ImportantDatesPage'\nimport AcceptedDatasets2025page from '../../pages/2025/AcceptedDatasetsPage'\nimport AcceptedPapers2025page from '../../pages/2025/AcceptedPapersPage'\nimport OrganizingCommittee2025page from '../../pages/2025/OrganizingCommitteePage'\nimport ReviewerInstruction2025page from '../../pages/2025/ReviewerInstructionPage'\nimport AdvisoryBoard2025page from '../../pages/2025/AdvisoryBoardPage'\nimport Sponsors2025page from '../../pages/2025/SponsorsPage'\nimport AreaChair2025page from '../../pages/2025/AreaChairPage'\nimport ProgramCommittee2025page from '../../pages/2025/ProgramCommitteePage'\nimport CLeaR2025Page from '../../pages/2025/CLeaRPage'\nimport FullAgenda2025page from '../../pages/2025/FullAgendaPage'\nimport KeynoteSpeaker2025page from '../../pages/2025/KeynoteSpeakerPage'\nimport CallforPapers2025page from '../../pages/2025/CallforPapersPage'\nimport ACInstruction2025page from '../../pages/2025/ACInstructionPage'\nimport CodeofConduct2025page from '../../pages/2025/CodeConductPage'\nimport CoI2025page from '../../pages/2025/CoIPage'\nimport CameraReady2025page from '../../pages/2025/CameraReady'\nimport Registration2025 from '../../pages/2025/RegistrationPage'\nimport PresentationInstruction2025page from '../../pages/2025/PresentationInstructionPage'\nimport OnlineSchedule2025page from '../../pages/2025/OnlineSchedulePage'\nimport SocialActivities2025page from '../../pages/2025/SocialActivitiesPage'\nimport StudentVolunteers2025page from '../../pages/2025/StudentVolunteersPage'\n\n\nimport './Content.scss'\n\n\nconst Content = () => {\n return (\n
    \n
    \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n \n \n
    \n
    \n )\n}\n\nexport default Content\n","import Header from '../components/Header/Header'\nimport NavBar from '../components/NavBar/NavBar';\nimport Content from '../components/Content/Content';\nimport './App.scss';\n\nfunction App() {\n return (\n
    \n
    \n
    \n
    \n \n \n
    \n
    \n
    \n );\n}\n\nexport default App;\n","import 'bootstrap/dist/css/bootstrap.min.css';\nimport 'jquery';\nimport 'popper.js';\nimport 'bootstrap/dist/js/bootstrap.bundle.min';\nimport React from 'react';\nimport ReactDOM from 'react-dom';\nimport { BrowserRouter } from 'react-router-dom';\nimport './index.scss';\nimport App from './app/App';\n\nReactDOM.render(\n \n \n \n \n ,\n document.getElementById('root')\n);\n\n\n"],"sourceRoot":""}