[ { "TAR @ FAR=0.01": 97.6, "code_links": [], "date": "2017-12-01", "date2": 20171201, "model": "Dual-Agent GANs", "paper": { "title": "Dual-Agent GANs for Photorealistic and Identity Preserving Profile Face Synthesis", "url": "https://cknow.io/lib/02a0362cb4a0114c" }, "paper_data_uoa": "02a0362cb4a0114c" }, { "TAR @ FAR=0.01": 97.2, "code_links": [], "date": "2018-10-23", "date2": 20181023, "model": "SE-GV-4-g1", "paper": { "title": "GhostVLAD for set-based face recognition", "url": "https://cknow.io/lib/caadaa013b449fbb" }, "paper_data_uoa": "caadaa013b449fbb" }, { "TAR @ FAR=0.01": 97, "code_links": [], "date": "2017-03-28", "date2": 20170328, "model": "L2-constrained softmax loss", "paper": { "title": "L2-constrained Softmax Loss for Discriminative Face Verification", "url": "https://cknow.io/lib/d4aa6e404ca45970" }, "paper_data_uoa": "d4aa6e404ca45970" }, { "TAR @ FAR=0.01": 94.4, "code_links": [ { "title": "penincillin/DREAM", "url": "https://github.com/penincillin/DREAM" } ], "date": "2018-03-02", "date2": 20180302, "model": "Deep Residual Equivariant Mapping", "paper": { "title": "Pose-Robust Face Recognition via Deep Residual Equivariant Mapping", "url": "https://cknow.io/lib/5a77ee0d239a99fe" }, "paper_data_uoa": "5a77ee0d239a99fe" }, { "TAR @ FAR=0.01": 94.1, "code_links": [], "date": "2016-03-17", "date2": 20160317, "model": "NAN", "paper": { "title": "Neural Aggregation Network for Video Face Recognition", "url": "https://cknow.io/lib/d9ff38c2e0af4831" }, "paper_data_uoa": "d9ff38c2e0af4831" }, { "TAR @ FAR=0.01": 93.9, "code_links": [], "date": "2016-03-12", "date2": 20160312, "model": "Template adaptation", "paper": { "title": "Template Adaptation for Face Verification and Identification", "url": "https://cknow.io/lib/645a82c3143da8a8" }, "paper_data_uoa": "645a82c3143da8a8" }, { "TAR @ FAR=0.01": 92.2, "code_links": [], "date": "2016-11-03", "date2": 20161103, "model": "All-in-one CNN", "paper": { "title": "An All-In-One Convolutional Neural Network for Face Analysis", "url": "https://cknow.io/lib/fedf47d194cca7e5" }, "paper_data_uoa": "fedf47d194cca7e5" }, { "TAR @ FAR=0.01": 90.1, "code_links": [ { "title": "fengju514/Face-Pose-Net", "url": "https://github.com/fengju514/Face-Pose-Net" }, { "title": "fengju514/Expression-Net", "url": "https://github.com/fengju514/Expression-Net" } ], "date": "2017-08-24", "date2": 20170824, "model": "FPN", "paper": { "title": "FacePoseNet: Making a Case for Landmark-Free Face Alignment", "url": "https://cknow.io/lib/f0094c46a7be82d8" }, "paper_data_uoa": "f0094c46a7be82d8" }, { "TAR @ FAR=0.01": 90, "code_links": [ { "title": "Ananaskelly/TPE", "url": "https://github.com/Ananaskelly/TPE" } ], "date": "2016-04-19", "date2": 20160419, "model": "Triplet probabilistic embedding", "paper": { "title": "Triplet Probabilistic Embedding for Face Verification and Clustering", "url": "https://cknow.io/lib/ebdff964f65afcd8" }, "paper_data_uoa": "ebdff964f65afcd8" }, { "TAR @ FAR=0.01": 88.6, "code_links": [], "date": "2016-03-23", "date2": 20160323, "model": "Synthesis as data augmentation", "paper": { "title": "Do We Really Need to Collect Millions of Faces for Effective Face Recognition?", "url": "https://cknow.io/lib/fa7ca646168c333b" }, "paper_data_uoa": "fa7ca646168c333b" }, { "TAR @ FAR=0.01": 83.8, "code_links": [], "date": "2015-08-07", "date2": 20150807, "model": "DCNN", "paper": { "title": "Unconstrained Face Verification using Deep CNN Features", "url": "https://cknow.io/lib/fca853dd6371265b" }, "paper_data_uoa": "fca853dd6371265b" }, { "TAR @ FAR=0.01": 78.7, "code_links": [], "date": "2016-03-23", "date2": 20160323, "model": "Deep multi-pose representations", "paper": { "title": "Face Recognition Using Deep Multi-Pose Representations", "url": "https://cknow.io/lib/5a8eb2e1a982437d" }, "paper_data_uoa": "5a8eb2e1a982437d" }, { "TAR @ FAR=0.01": 73.3, "code_links": [], "date": "2015-07-26", "date2": 20150726, "model": "Deep CNN + COTS matcher", "paper": { "title": "Face Search at Scale: 80 Million Gallery", "url": "https://cknow.io/lib/f4e2fb4d4cacd2d9" }, "paper_data_uoa": "f4e2fb4d4cacd2d9" } ]