{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,18]],"date-time":"2026-02-18T23:35:11Z","timestamp":1771457711146,"version":"3.50.1"},"reference-count":38,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/www.elsevier.com\/legal\/tdmrep-license"},{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/doi.org\/10.15223\/policy-017"},{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/doi.org\/10.15223\/policy-037"},{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-012"},{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,8,1]],"date-time":"2024-08-01T00:00:00Z","timestamp":1722470400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-004"}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Computer Communications"],"published-print":{"date-parts":[[2024,8]]},"DOI":"10.1016\/j.comcom.2024.05.016","type":"journal-article","created":{"date-parts":[[2024,5,22]],"date-time":"2024-05-22T15:17:56Z","timestamp":1716391076000},"page":"242-253","update-policy":"https:\/\/2.zoppoz.workers.dev:443\/https\/doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":2,"special_numbering":"C","title":["UMPL- VINS: Generalized SLAM for multi-scene metaverse applications"],"prefix":"10.1016","volume":"224","author":[{"given":"Hao","family":"Jiang","sequence":"first","affiliation":[]},{"given":"Yilin","family":"Shang","sequence":"additional","affiliation":[]},{"given":"Shan","family":"Xue","sequence":"additional","affiliation":[]},{"given":"Dongsheng","family":"Guo","sequence":"additional","affiliation":[]},{"given":"Weidong","family":"Zhang","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"key":"10.1016\/j.comcom.2024.05.016_bib1","article-title":"Evolution of slam: toward the robust-perception of autonomy","author":"Udugama","year":"2023","journal-title":"arXiv - CS - Robotics"},{"key":"10.1016\/j.comcom.2024.05.016_bib2","doi-asserted-by":"crossref","DOI":"10.1016\/j.engappai.2022.104992","article-title":"A review of visual slam methods for autonomous driving vehicles","volume":"114","author":"Cheng","year":"2022","journal-title":"Eng. Appl. Artif. Intell."},{"key":"10.1016\/j.comcom.2024.05.016_bib3","article-title":"Research on robot positioning and navigation algorithm based on slam","volume":"3340529","author":"Dai","year":"2022","journal-title":"Mobile Comput"},{"key":"10.1016\/j.comcom.2024.05.016_bib4","doi-asserted-by":"crossref","DOI":"10.1016\/j.robot.2022.104342","article-title":"A review of quadrotor uav: control and slam methodologies ranging from conventional to innovative approaches","volume":"161","author":"Sonugur","year":"2023","journal-title":"Robot. Autonom. Syst."},{"key":"10.1016\/j.comcom.2024.05.016_bib5","series-title":"2022 IEEE\/RSJ International Conference on Intelligent Robots and Systems (IROS), Kyoto, Japan","first-page":"8948","article-title":"EPAR: an efficient and Privacy-aware augmented reality framework for indoor location-based services","author":"Peng","year":"2022"},{"issue":"6","key":"10.1016\/j.comcom.2024.05.016_bib6","doi-asserted-by":"crossref","first-page":"4351","DOI":"10.1109\/JIOT.2018.2863688","article-title":"Indoor floor plan construction through sensing data collected from smartphones","volume":"5","author":"Peng","year":"2018","journal-title":"IEEE Internet Things J."},{"key":"10.1016\/j.comcom.2024.05.016_bib7","series-title":"2016 IEEE\/ACM 24th International Symposium on Quality of Service (IWQoS), Beijing, China","first-page":"1","article-title":"Smartphone-assisted smooth live video broadcast on wearable cameras","author":"Li","year":"2016"},{"key":"10.1016\/j.comcom.2024.05.016_bib8","doi-asserted-by":"crossref","first-page":"33","DOI":"10.1016\/j.comcom.2016.08.011","article-title":"Smartphone-assisted energy efficient data communication for wearable devices","volume":"105","author":"Li","year":"2017","journal-title":"Comput. Commun."},{"key":"10.1016\/j.comcom.2024.05.016_bib9","article-title":"Visual-inertial navigation: a concise review","author":"Huang","year":"2019","journal-title":"arXiv - CS - Robotics"},{"key":"10.1016\/j.comcom.2024.05.016_bib10","series-title":"Proceedings 2007 IEEE International Conference on Robotics and Automation, Rome, Italy","first-page":"3565","article-title":"A multi-state constraint kalman filter for vision-aided inertial navigation","author":"Mourikis","year":"2007"},{"issue":"4","key":"10.1016\/j.comcom.2024.05.016_bib11","doi-asserted-by":"crossref","first-page":"1004","DOI":"10.1109\/TRO.2018.2853729","article-title":"VINS-Mono: a robust and versatile monoc- ular visual-inertial state estimator","volume":"34","author":"Qin","year":"2018","journal-title":"IEEE Trans. Robot."},{"issue":"11","key":"10.1016\/j.comcom.2024.05.016_bib12","doi-asserted-by":"crossref","first-page":"2701","DOI":"10.1109\/TMC.2018.2879933","article-title":"When urban safety index inference meets location-based data","volume":"18","author":"Peng","year":"2019","journal-title":"IEEE Trans. Mobile Comput."},{"issue":"5","key":"10.1016\/j.comcom.2024.05.016_bib13","doi-asserted-by":"crossref","first-page":"155","DOI":"10.1109\/MWC.001.1900559","article-title":"Real-time cache-aided route planning based on mobile edge computing","volume":"27","author":"Yao","year":"2020","journal-title":"IEEE Wireless Commun."},{"key":"10.1016\/j.comcom.2024.05.016_bib14","series-title":"2017 IEEE International Conference on Communications (ICC), Paris, France","first-page":"1","article-title":"U-safety: urban safety analysis in a smart city","author":"Peng","year":"2017"},{"issue":"3","key":"10.1016\/j.comcom.2024.05.016_bib15","doi-asserted-by":"crossref","first-page":"734","DOI":"10.1109\/TRO.2019.2899783","article-title":"PL-SLAM: a stereo SLAM system through the combination of points and line segments","volume":"35","author":"Gomez-Ojeda","year":"2019","journal-title":"IEEE Trans. Robot."},{"key":"10.1016\/j.comcom.2024.05.016_bib16","series-title":"2011 IEEE International Conference on Robotics and Automation, Shanghai, China","first-page":"1497","article-title":"Building a partial 3D line-based map using a monocular SLAM","author":"Zhang","year":"2011"},{"key":"10.1016\/j.comcom.2024.05.016_bib17","series-title":"British Machine Vision Conference, Edinburgh, Britain","first-page":"17","article-title":"Real-time monocular SLAM with straight lines","author":"Smith","year":"2006"},{"issue":"4","key":"10.1016\/j.comcom.2024.05.016_bib18","first-page":"1364","article-title":"StructSLAM: visual SLAM with building structure lines","volume":"64","author":"Zhou","year":"2015","journal-title":"IEEE Trans. Robot."},{"key":"10.1016\/j.comcom.2024.05.016_bib19","series-title":"2017 IEEE International Conference on Multisensor Fusion and Integration for Intelligent Systems (MFI), Daegu, Korea (South)","first-page":"494","article-title":"Line-based monocular graph SLAM","author":"Ruifang","year":"2017"},{"key":"10.1016\/j.comcom.2024.05.016_bib20","series-title":"PL-VINS: Real-Time Monocular Visual-Inertial SLAM with Point and Line","author":"Fu","year":"2020"},{"key":"10.1016\/j.comcom.2024.05.016_bib21","series-title":"Proceedings of the European Conference on Computer Vision","first-page":"516","article-title":"Good line cutting: towards accurate pose tracking of line-assisted VO\/VSLAM","author":"Zhao","year":"2018"},{"issue":"5","key":"10.1016\/j.comcom.2024.05.016_bib22","doi-asserted-by":"crossref","first-page":"1416","DOI":"10.1109\/TRO.2021.3061403","article-title":"Line flow based simultaneous localization and mapping","volume":"37","author":"Wang","year":"2021","journal-title":"IEEE Trans. Robot."},{"issue":"6","key":"10.1016\/j.comcom.2024.05.016_bib23","doi-asserted-by":"crossref","first-page":"45","DOI":"10.1177\/027836498800700605","article-title":"Building, registrating, and fusing noisy visual maps","volume":"7","author":"Ayache","year":"1988","journal-title":"Int. J. Robot Res."},{"key":"10.1016\/j.comcom.2024.05.016_bib24","doi-asserted-by":"crossref","first-page":"663","DOI":"10.1007\/978-3-319-03413-3_49","article-title":"LineSLAM: visual real time localization using lines and UKF","volume":"1","author":"Perdices","year":"2014","journal-title":"ROBOT2013: First Iberian Robotics Conference: Advances in Robotics"},{"key":"10.1016\/j.comcom.2024.05.016_bib25","series-title":"2017 IEEE International Conference on Robotics and Automation (ICRA), Singapore","first-page":"4503","article-title":"PL-SLAM: real-time monocular visual SLAM with points and lines","author":"Pumarola","year":"2017"},{"issue":"4","key":"10.1016\/j.comcom.2024.05.016_bib26","article-title":"PL-VIO: tightly-coupled monocular visual-inertial odometry using point and line features","volume":"18","author":"Yijia","year":"2018","journal-title":"Sensors"},{"issue":"4","key":"10.1016\/j.comcom.2024.05.016_bib27","doi-asserted-by":"crossref","first-page":"7113","DOI":"10.1109\/LRA.2021.3097052","article-title":"DPLVO: direct point-line monocular visual odometry","volume":"6","author":"Zhou","year":"2021","journal-title":"IEEE Rob. Autom. Lett."},{"key":"10.1016\/j.comcom.2024.05.016_bib28","series-title":"2022 International Conference on Robotics and Automation (ICRA), Philadelphia, PA, USA","first-page":"7559","article-title":"EDPLVO: efficient direct point-line visual odometry","author":"Zhou","year":"2022"},{"issue":"7","key":"10.1016\/j.comcom.2024.05.016_bib29","doi-asserted-by":"crossref","first-page":"794","DOI":"10.1016\/j.jvcir.2013.05.006","article-title":"An efficient and robust line segment matching approach based on LBD descriptor and pairwise geometric consistency","volume":"24","author":"Zhang","year":"2013","journal-title":"J. Vis. Commun. Image Represent."},{"key":"10.1016\/j.comcom.2024.05.016_bib30","doi-asserted-by":"crossref","first-page":"156361","DOI":"10.1109\/ACCESS.2021.3130177","article-title":"A new full-reference image quality metric for motion blur profile characterization","volume":"9","author":"Abdullah-Al-Mamun","year":"2021","journal-title":"IEEE Access"},{"issue":"10","key":"10.1016\/j.comcom.2024.05.016_bib31","doi-asserted-by":"crossref","first-page":"1157","DOI":"10.1177\/0278364915620033","article-title":"The EuRoc micro aerial vehicle datasets","volume":"35","author":"Burri","year":"2016","journal-title":"Int. J. Robot Res."},{"key":"10.1016\/j.comcom.2024.05.016_bib32","series-title":"2018 IEEE\/RSJ International Conference on Intelligent Robots and Systems (IROS), Madrid, Spain","first-page":"1680","article-title":"The TUMVI benchmarkfor evaluating visual-inertial odometry","author":"Schubert","year":"2018"},{"issue":"1","key":"10.1016\/j.comcom.2024.05.016_bib33","doi-asserted-by":"crossref","DOI":"10.3390\/electronics9010198","article-title":"Real-time image stabilization method based on optical flow and binary point feature matching","author":"Deng","year":"2020","journal-title":"Electronics"},{"key":"10.1016\/j.comcom.2024.05.016_bib34","doi-asserted-by":"crossref","first-page":"333","DOI":"10.1007\/s00371-023-02785-2","article-title":"Coarse-to-fine blind image deblurring based on K-means clustering","volume":"40","author":"Eqtedaei","year":"2024","journal-title":"Vis. Comput."},{"key":"10.1016\/j.comcom.2024.05.016_bib35","article-title":"Signal-to-Noise ratio comparison of several filters against phantom image","author":"Hameed","year":"2022","journal-title":"Journal of Healthcare Engineering"},{"key":"10.1016\/j.comcom.2024.05.016_bib36","doi-asserted-by":"crossref","first-page":"128","DOI":"10.1016\/j.neucom.2022.02.067","article-title":"High-resolution optical flow and frame-recurrent network for video super-resolution and deblurring","volume":"489","author":"Fang","year":"2022","journal-title":"Neurocomputing"},{"issue":"11","key":"10.1016\/j.comcom.2024.05.016_bib37","doi-asserted-by":"crossref","first-page":"1231","DOI":"10.1177\/0278364913491297","article-title":"Vision meets robotics: the KITTI dataset","volume":"32","author":"Geiger","year":"2013","journal-title":"Int. J. Robot Res."},{"key":"10.1016\/j.comcom.2024.05.016_bib38","series-title":"2017 IEEE International Conference on Robotics and Automation (ICRA), Singapore","first-page":"3847","article-title":"PennCOSYVIO: a challenging visual inertial odometry benchmark","author":"Pfrommer","year":"2017"}],"container-title":["Computer Communications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/api.elsevier.com\/content\/article\/PII:S0140366424001890?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/api.elsevier.com\/content\/article\/PII:S0140366424001890?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2024,7,14]],"date-time":"2024-07-14T22:34:12Z","timestamp":1720996452000},"score":1,"resource":{"primary":{"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/linkinghub.elsevier.com\/retrieve\/pii\/S0140366424001890"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,8]]},"references-count":38,"alternative-id":["S0140366424001890"],"URL":"https:\/\/2.zoppoz.workers.dev:443\/https\/doi.org\/10.1016\/j.comcom.2024.05.016","relation":{},"ISSN":["0140-3664"],"issn-type":[{"value":"0140-3664","type":"print"}],"subject":[],"published":{"date-parts":[[2024,8]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"UMPL- VINS: Generalized SLAM for multi-scene metaverse applications","name":"articletitle","label":"Article Title"},{"value":"Computer Communications","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/2.zoppoz.workers.dev:443\/https\/doi.org\/10.1016\/j.comcom.2024.05.016","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2024 Published by Elsevier B.V.","name":"copyright","label":"Copyright"}]}}