[HepData-svn] r1856 - trunk/hepdata-webapp/src/main/java/cedar/hepdata/formats
blackhole at projects.hepforge.org
Wed Apr 22 12:34:55 BST 2015
Author: whalley
Date: Wed Apr 22 12:34:55 2015
New Revision: 1856

Log:
first go at yaml formatter

Added:
   trunk/hepdata-webapp/src/main/java/cedar/hepdata/formats/YamlFormatter.java

Added: trunk/hepdata-webapp/src/main/java/cedar/hepdata/formats/YamlFormatter.java
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/hepdata-webapp/src/main/java/cedar/hepdata/formats/YamlFormatter.java	Wed Apr 22 12:34:55 2015	(r1856)
@@ -0,0 +1,239 @@
+package cedar.hepdata.formats;
+
+import cedar.hepdata.model.*;
+import cedar.hepdata.util.*;
+import cedar.hepdata.xml.*;
+import cedar.hepdata.db.*;
+import cedar.hepdata.webapp.components.*;
+
+
+import java.util.*;
+import java.text.*;
+import java.io.File;
+import java.io.*;
+
+import org.antlr.stringtemplate.*;
+import com.Ostermiller.util.SignificantFigures;
+
+
+public class YamlFormatter {
+
+    public static String format(Paper p) {
+        StringBuffer s = new StringBuffer();
+        String s4="    ";
+        String sp="  - ";
+        if(p == null) return null;
+
+// _headerXML contains all the bibliographic type stuff at the beginning
+
+        s.append(_headerXML(p));
+
+// here we deal with the extra resource are description/line files
+
+        String[] descs = { "description1" , "description2" , "description3" , "description4" };
+        String[] links = { "link1" , "link2" , "link3" , "link4" };
+        String[] ids = new String[3];
+        if(p.getSpiresId() != null ) { ids[0]=p.getSpiresId().toString(); } else { ids[0]=""; }
+        if(p.getInspireId() != null ) { ids[1]=p.getInspireId().toString(); } else { ids[1]=""; }
+        if(p.getRedId() != null ) { ids[2]=p.getRedId().toString(); } else { ids[2]=""; }
+
+        boolean first=true;
+        for (int i=0; i<descs.length; i++){
+            for(int j=0; j<ids.length; j++){
+                String descfile = "/home/hepdata/resource/" +ids[j] + "/" + descs[i];
+                String linkfile = "/home/hepdata/resource/" +ids[j] + "/" + links[i];
+                File testdesc = new File(descfile);
+                File testlink = new File(linkfile);
+                String desc="";
+                String link="";
+                if(testdesc.exists() && testlink.exists() ){
+                    try {
+                        BufferedReader in = new BufferedReader(new FileReader(descfile));
+                        desc=in.readLine();
+                    }
+                    catch (IOException e){}
+                    try {
+                        BufferedReader in = new BufferedReader(new FileReader(linkfile));
+                        link=in.readLine();
+                    }
+                    catch (IOException e){}
+                    if(first){ s.append("extras:\n"); }
+                    first=false;
+                    s.append(sp+"description : "+desc+"\n");
+                    s.append(s4+"link : "+link+"\n");
+                }
+            }
+        }
+
+
+// next the insert.html file in the resource area
+        first=true;
+        for (int j=0; j<ids.length; j++){
+            String insertfile = "/home/hepdata/resource/" +ids[j] + "/insert.html";
+            File testinsert = new File(insertfile);
+            String insert="";
+            if(testinsert.exists()){
+                if(first) { s.append("inserts:\n"); }
+                first=false;
+                s.append(sp+"inserthtml: /resource/"+ids[j]+"insert.html\n");
+            }
+        }
+
+// and now the datasets
+
+        for (Dataset ds : p.getDatasets()){
+            if(ds.getId() == 1) { s.append("datasets:\n"); } // first dataset, start the array
+
+            s.append(_metadataXML(ds)); // dataset information in _metadataXML
+
+        }
+
+// finally finish off (at present does nothing!
+        s.append(_footerXML());
+
+// then write it all out.
+        return s.toString();
+    }
+
+    public static String _getTimestamp(){
+        DateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
+        Date date = new Date();
+        return dateFormat.format(date);
+    }
+
+
+    public static String _headerXML(Paper p) {
+        String s4="    ";
+        String sp="  - ";
+        StringBuffer s = new StringBuffer();
+        s.append("inspireid: "+p.getInspireId()+"\n");
+        if(p.getSpiresId()!=null) { s.append("spires: "+p.getSpiresId()+"\n"); }
+        if(p.getArchive()!=null) { s.append("archive: "+p.getArchive().replaceAll("ARXIV","arXiv")+"\n"); }
+        if(p.getRedId()!=null) { s.append("durham: "+p.getRedId()+"\n"); }
+        if(p.getCdsId()!=null) { s.append("cds: "+p.getCdsId()+"\n"); }
+        s.append("dateupdated: "+p.getDateUpdated()+"\n");
+        if(p.getTitle()!=null) { s.append("title: "+p.getTitle()+"\n"); }
+        if(p.getAuthors().size()>0){
+            s.append("authors:\n");
+            for (String author : p.getAuthors()){
+                s.append(sp+"author: "+author+"\n");
+            }
+        }
+        if(p.getReferences().size()>0){
+            s.append("references:\n");
+            for (Reference r : p.getReferences()){
+                s.append(sp+"{description: "+r.getDescription()+", ");
+                s.append("date: "+r.getDate()+", ");
+                s.append("type: "+r.getType()+"}");
+                s.append("\n");
+            }
+        }
+        if(p.getModifications().size()>0){
+            s.append("modifications:\n");
+            for (Modification m : p.getModifications()){
+                s.append(sp+"{action: "+m.getAction()+", ");
+                s.append("modifier: "+m.getModifier()+", ");
+                s.append("date: "+m.getComment()+"}");
+                s.append("\n");
+            }
+        }
+
+
+        s.append("collaboration: "+p.getInformalName()+"\n");
+        s.append("experiment: "+p.getExperimentName()+"\n");
+        if(p.getComments().size()>0){
+            s.append("comments:\n");
+            for (String comment : p.getComments()){
+                s.append(sp+"comment: "+comment+"\n");
+            }
+        }
+        return s.toString();
+    }
+
+    public static String _footerXML() {
+        StringBuffer s = new StringBuffer();
+        return s.toString();
+    }
+
+    public static String _metadataXML(Dataset ds){
+        String s4="    ";
+        String sp="  - ";
+        StringBuffer s = new StringBuffer();
+        int idbase=9000000;
+        int id=idbase+ds.getId();
+        String location = "";
+        Paper p = ds.getPaper();
+
+        for (String ct : ds.getComments()){
+            if(ct.startsWith("Location:")) {
+                location = ct.replaceFirst("Location:","");
+                s.append(sp+"label: "+ct.replaceFirst("Location:","Data from")+" from: "+p.getTitle()+"\n");
+            }
+        }
+
+        for (String ct : ds.getComments()){
+            if(!ct.startsWith("Location:")) {
+                s.append(s4+"comment: "+ct.replaceFirst("VERBATIM","").replaceAll("&gt;",">").replaceAll("&lt;","<")+"\n");
+            }
+        }
+
+        for (String dsreac : ds.getDsReactions()){
+            s.append(s4+"reackey: "+dsreac+"\n");
+        }
+        for (String dsobs : ds.getDsObservables()){
+            s.append(s4+"obskey: "+dsobs+"\n");
+        }
+        for (String dsplab : ds.getDsPlabs()){
+            s.append(s4+"plabkey: \""+dsplab+"\n");
+        }
+
+        s.append(s4+"xaxes:\n");
+        for (XAxis xax : ds.getXAxes()){
+            s.append(s4+sp+"xheader: "+xax.getHeader()+"\n");
+            s.append(s4+s4+"bins:\n");
+            for (Bin bin : xax.getBins()){
+                s.append(s4+s4+sp+"{id: "+bin.getId()+", ");
+                if(bin.getDescription() != null ) { s.append("description: "+bin.getDescription()+", "); }
+                s.append("relation: "+bin.getRelation()+", ");
+                s.append("focus: "+bin.getFocus()+", ");
+                s.append("high: "+bin.getHighValue()+", ");
+                s.append("low: "+bin.getLowValue()+"");
+                if(bin.getId() < xax.getBins().size()) {s.append("}");}
+                else{s.append("}");}
+                s.append("\n");
+            }
+        }
+
+        s.append(s4+"yaxes:\n");
+        for (YAxis yax : ds.getYAxes()){
+            s.append(s4+sp+"yheader: "+yax.getHeader()+"\n");
+            boolean first=true;
+            for (Property prop : yax.getProperties()){
+                if(first) { s.append(s4+s4+"qualifiers:\n"); }
+                first=false;
+                s.append(s4+s4+sp+"qualifier: "+prop.getFull()+"\n");
+            }
+            for (String comment : yax.getComments()){
+                if(first) { s.append(s4+s4+"qualifiers:\n"); }
+                first=false;
+                s.append(s4+s4+sp+"qualifier: "+comment+"\n");
+            }
+            s.append(s4+s4+"points:\n");
+            for (Point point : yax.getPoints()){
+                s.append(s4+s4+sp+"id: "+point.getId()+"\n");
+                s.append(s4+s4+s4+"value: "+point.getValue()+"\n");
+                if(point.getErrors().size()>0){
+                    s.append(s4+s4+s4+"errors:\n");
+                    for(PointError error : point.getErrors()){
+                        s.append(s4+s4+s4+sp+"{plus: "+error.getPlus()+", ");
+                        s.append("minus: \""+error.getMinus()+", ");
+                        s.append("norm: "+error.getNormType()+", ");
+                        s.append("type: "+error.getSourceType()+", ");
+                        s.append("comment: "+error.getComment()+"}\n");
+                    }
+                }
+            }
+        }
+        return s.toString();
+    }
+}
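For context, the new class exposes a single public entry point, YamlFormatter.format(Paper), which builds the whole YAML document in memory and returns it as a String (null for a null paper), leaving it to the caller to decide where the output goes. The sketch below is a minimal, hypothetical usage example and is not part of r1856; the YamlExportExample class, the writeYaml method and the output path are illustrative only, and the Paper instance is assumed to come from the existing HepData model layer.

import cedar.hepdata.formats.YamlFormatter;
import cedar.hepdata.model.*;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class YamlExportExample {

    // Formats a previously loaded Paper as YAML and writes it to outputPath.
    // Class name, method name and path are illustrative, not part of the commit.
    public static void writeYaml(Paper paper, String outputPath) throws IOException {
        String yaml = YamlFormatter.format(paper);   // returns null when paper is null
        if (yaml == null) {
            return;                                  // nothing to write
        }
        Writer out = new FileWriter(outputPath);
        try {
            out.write(yaml);
        } finally {
            out.close();
        }
    }
}

Because format() returns a plain String, the same call could equally feed an HTTP response in the webapp rather than a file on disk.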