Support Parsing UNIX Time for ClassAd data

Change-Id: Iba0a687475edd30078b1e5cda1810244eed76219
Reviewed-on: https://asterix-gerrit.ics.uci.edu/783
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: abdullah alamoudi <bamousaa@gmail.com>
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
index 876639d..493bd3b 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
@@ -18,6 +18,9 @@
  */
 package org.apache.asterix.external.classad.test;
 
+import java.io.File;
+import java.io.PrintStream;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -36,6 +39,18 @@
 import org.apache.asterix.external.input.stream.LocalFSInputStream;
 import org.apache.asterix.external.library.ClassAdParser;
 import org.apache.asterix.external.util.FileSystemWatcher;
+import org.apache.asterix.formats.nontagged.AqlADMPrinterFactoryProvider;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.commons.io.FileUtils;
+import org.apache.hyracks.algebricks.data.IPrinter;
+import org.apache.hyracks.algebricks.data.IPrinterFactory;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.junit.Assert;
 
 import junit.framework.Test;
 import junit.framework.TestCase;
@@ -59,12 +74,74 @@
         return new TestSuite(ClassAdToADMTest.class);
     }
 
+    private void printTuple(ArrayTupleBuilder tb, IPrinter[] printers, PrintStream printStream)
+            throws HyracksDataException {
+        int[] offsets = tb.getFieldEndOffsets();
+        for (int i = 0; i < printers.length; i++) {
+            int offset = i == 0 ? 0 : offsets[i - 1];
+            int length = i == 0 ? offsets[0] : offsets[i] - offsets[i - 1];
+            printers[i].print(tb.getByteArray(), offset, length, printStream);
+            printStream.println();
+        }
+    }
+
+    @SuppressWarnings("rawtypes")
+    public void testSchemaful() {
+        try {
+            File file = new File("target/classad-with-temporals.adm");
+            File expected = new File(getClass().getResource("/results/classad-with-temporals.adm").toURI().getPath());
+            FileUtils.deleteQuietly(file);
+            PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
+            String[] recordFieldNames = { "GlobalJobId", "Owner", "ClusterId", "ProcId", "RemoteWallClockTime",
+                    "CompletionDate", "QDate", "JobCurrentStartDate", "JobStartDate", "JobCurrentStartExecutingDate" };
+            IAType[] recordFieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
+                    BuiltinType.AINT32, BuiltinType.ADURATION, BuiltinType.ADATETIME, BuiltinType.ADATETIME,
+                    BuiltinType.ADATETIME, BuiltinType.ADATETIME, BuiltinType.ADATETIME };
+            ARecordType recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
+            int numOfTupleFields = 1;
+            ISerializerDeserializer[] serdes = new ISerializerDeserializer[1];
+            serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
+            IPrinterFactory[] printerFactories = new IPrinterFactory[1];
+            printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
+            // create output descriptor
+            IPrinter[] printers = new IPrinter[printerFactories.length];
+            for (int i = 0; i < printerFactories.length; i++) {
+                printers[i] = printerFactories[i].createPrinter();
+            }
+            ClassAdObjectPool objectPool = new ClassAdObjectPool();
+            String[] files = new String[] { "/classad-with-temporals.classads" };
+            ClassAdParser parser = new ClassAdParser(recordType, false, false, false, null, null, null, objectPool);
+            ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
+            for (String path : files) {
+                List<Path> paths = new ArrayList<>();
+                paths.add(Paths.get(getClass().getResource(path).toURI()));
+                FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
+                LocalFSInputStream in = new LocalFSInputStream(watcher);
+                SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, "[", "]");
+                while (recordReader.hasNext()) {
+                    tb.reset();
+                    IRawRecord<char[]> record = recordReader.next();
+                    parser.parse(record, tb.getDataOutput());
+                    tb.addFieldEndOffset();
+                    printTuple(tb, printers, printStream);
+                }
+                recordReader.close();
+                printStream.close();
+                Assert.assertTrue(FileUtils.contentEquals(file, expected));
+            }
+        } catch (Throwable th) {
+            System.err.println("TEST FAILED");
+            th.printStackTrace();
+            Assert.assertTrue(false);
+        }
+        System.err.println("TEST PASSED");
+    }
+
     /**
      *
      */
-    public void test() {
+    public void testSchemaless() {
         try {
-            // test here
             ClassAdObjectPool objectPool = new ClassAdObjectPool();
             ClassAd pAd = new ClassAd(objectPool);
             String[] files = new String[] { "/jobads.txt" };
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
index 5fd6f21..93b31ca 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
@@ -195,12 +195,14 @@
 
             if ((!evaluateExpr || keepBoth) && isExpr && positionBefore >= 0) {
                 // we will store a string representation of the expression
-                int len = lexer.getLexSource().getPosition() - positionBefore;
+                int len = lexer.getLexSource().getPosition() - positionBefore - 2;
                 // add it as it is to the classAd
                 Literal lit = objectPool.literalPool.get();
                 Value exprVal = objectPool.valuePool.get();
-                exprVal.setStringValue(exprPrefix
-                        + String.valueOf(lexer.getLexSource().getBuffer(), positionBefore, len) + exprSuffix);
+
+                exprVal.setStringValue((exprPrefix == null ? "" : exprPrefix)
+                        + String.valueOf(lexer.getLexSource().getBuffer(), positionBefore, len)
+                        + (exprSuffix == null ? "" : exprSuffix));
                 Literal.createLiteral(lit, exprVal, NumberFactor.NO_FACTOR);
                 if (!evaluateExpr) {
                     ad.insert(tv.getStrValue().toString(), lit);
@@ -318,9 +320,6 @@
                     throw new HyracksDataException("This record is closed, you can not add extra fields !!");
                 } else if (fieldId < 0 && recType.isOpen()) {
                     aStringFieldName.setValue(fldName);
-                    if (aStringFieldName.getStringValue().contains("org.apache.asterix.external.classad.TokenValue")) {
-                        System.err.println("we have a problem");
-                    }
                     stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
                     openRecordField = true;
                     fieldType = null;
@@ -362,7 +361,6 @@
         recBuilder.write(out, true);
     }
 
-    // The only method left
     private void writeFieldValueToBuffer(IAType fieldType, DataOutput out, String name, ExprTree tree, ClassAd pAd)
             throws IOException, AsterixException {
         Value val;
@@ -375,7 +373,6 @@
             case OP_NODE:
                 val = objectPool.valuePool.get();
                 if (pAd.evaluateAttr(name, val)) {
-
                 } else {
                     // just write the expr
                     val = ((Literal) pAd.getAttrList().get(name + "Expr")).getValue();
@@ -423,8 +420,36 @@
                 break;
             case INTEGER_VALUE:
                 if (checkType(ATypeTag.INT64, fieldType)) {
-                    aInt64.setValue(val.getLongVal());
-                    int64Serde.serialize(aInt64, out);
+                    if (fieldType == null || fieldType.getTypeTag() == ATypeTag.INT64) {
+                        aInt64.setValue(val.getLongVal());
+                        int64Serde.serialize(aInt64, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT32) {
+                        aInt32.setValue((int) val.getLongVal());
+                        int32Serde.serialize(aInt32, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.DOUBLE) {
+                        aDouble.setValue(val.getLongVal());
+                        doubleSerde.serialize(aDouble, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT16) {
+                        aInt16.setValue((short) val.getLongVal());
+                        int16Serde.serialize(aInt16, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT8) {
+                        aInt8.setValue((byte) val.getLongVal());
+                        int8Serde.serialize(aInt8, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
+                        aFloat.setValue(val.getLongVal());
+                        floatSerde.serialize(aFloat, out);
+                    }
+                } else if (checkType(ATypeTag.DATETIME, fieldType)) {
+                    // ClassAd uses UNIX timestamps (seconds); ADM datetime expects ms
+                    aDateTime.setValue(val.getLongVal() * 1000);
+                    datetimeSerde.serialize(aDateTime, out);
+                } else if (checkType(ATypeTag.DURATION, fieldType)) {
+                    // ClassAd uses UNIX timestamps (seconds); ADM duration expects ms
+                    aDuration.setValue(0, val.getLongVal() * 1000);
+                    durationSerde.serialize(aDuration, out);
+                } else if (checkType(ATypeTag.INT32, fieldType)) {
+                    aInt32.setValue((int) val.getLongVal());
+                    int32Serde.serialize(aInt32, out);
                 } else if (checkType(ATypeTag.DOUBLE, fieldType)) {
                     aDouble.setValue(val.getLongVal());
                     doubleSerde.serialize(aDouble, out);
@@ -447,14 +472,39 @@
                 break;
             case REAL_VALUE:
                 if (checkType(ATypeTag.DOUBLE, fieldType)) {
-                    aDouble.setValue(val.getDoubleVal());
-                    doubleSerde.serialize(aDouble, out);
+                    if (fieldType == null || fieldType.getTypeTag() == ATypeTag.DOUBLE) {
+                        aDouble.setValue(val.getDoubleVal());
+                        doubleSerde.serialize(aDouble, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT32) {
+                        aInt32.setValue((int) val.getDoubleVal());
+                        int32Serde.serialize(aInt32, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT64) {
+                        aInt64.setValue((long) val.getDoubleVal());
+                        int64Serde.serialize(aInt64, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT16) {
+                        aInt16.setValue((short) val.getDoubleVal());
+                        int16Serde.serialize(aInt16, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.INT8) {
+                        aInt8.setValue((byte) val.getDoubleVal());
+                        int8Serde.serialize(aInt8, out);
+                    } else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
+                        aFloat.setValue((float) val.getDoubleVal());
+                        floatSerde.serialize(aFloat, out);
+                    }
                 } else if (checkType(ATypeTag.INT32, fieldType)) {
                     aInt32.setValue((int) val.getDoubleVal());
                     int32Serde.serialize(aInt32, out);
                 } else if (checkType(ATypeTag.INT64, fieldType)) {
                     aInt64.setValue((long) val.getDoubleVal());
                     int64Serde.serialize(aInt64, out);
+                } else if (checkType(ATypeTag.DATETIME, fieldType)) {
+                    // ClassAd uses UNIX timestamps (seconds); ADM datetime expects ms.
+                    // NOTE(review): this is the REAL_VALUE case, so read the double member
+                    // (getLongVal() would read the wrong union member, as the DURATION branch below shows).
+                    aDateTime.setValue((long) (val.getDoubleVal() * 1000.0));
+                    datetimeSerde.serialize(aDateTime, out);
+                } else if (checkType(ATypeTag.DURATION, fieldType)) {
+                    // ClassAd uses UNIX timestamps (seconds); ADM duration expects ms
+                    aDuration.setValue(0, (long) (val.getDoubleVal() * 1000.0));
+                    durationSerde.serialize(aDuration, out);
                 } else {
                     throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
                 }
diff --git a/asterix-external-data/src/test/resources/classad-with-temporals.classads b/asterix-external-data/src/test/resources/classad-with-temporals.classads
new file mode 100644
index 0000000..e20be09
--- /dev/null
+++ b/asterix-external-data/src/test/resources/classad-with-temporals.classads
@@ -0,0 +1,134 @@
+
+    [
+        Schedd = "submit-5.chtc.wisc.edu";
+        BlockWrites = 3;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1459300924;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 6.607100000000000E+04;
+        NiceUser = false;
+        BytesRecvd = 7.292000000000000E+03;
+        RequestMemory = 12288;
+        ResidentSetSize = 750000;
+        StreamOut = false;
+        SpooledOutputFiles = "job697_results.tar.gz";
+        Arguments = "";
+        OnExitRemove = true;
+        ImageSize_RAW = 607024;
+        RemoteWallClockTime = 6.629100000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 4;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 8.580547200000000E+07;
+        LastRejMatchReason = "no match found ";
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 4;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        BlockReadKbytes = 0;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/grandaduarte/mars/mhb1";
+        Cmd = "/home/grandaduarte/mars/mhb1/job697.sh";
+        CommittedSuspensionTime = 0;
+        RecentStatsLifetimeStarter = 1200;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        BufferSize = 524288;
+        JobCurrentStartTransferOutputDate = 1459367212;
+        RecentBlockWrites = 0;
+        CompletionDate = 1459367213;
+        LastMatchTime = 1459300922;
+        LastJobLeaseRenewal = 1459367213;
+        DAGManNodesLog = "/home/grandaduarte/mars/mhb1/./dagman.dag.nodes.log";
+        ClusterId = 16798777;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        ProcId = 0;
+        PeriodicHold = false;
+        CondorPlatform = "$CondorPlatform: x86_64_RedHat6 $";
+        JobFinishedHookDone = 1459367213;
+        In = "/dev/null";
+        DiskUsage = 7500000;
+        EncryptExecuteDirectory = false;
+        User = "grandaduarte@chtc.wisc.edu";
+        LeaveJobInQueue = false;
+        Requirements = ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || ( ( MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" ) && ( TARGET.OpSysMajorVer == MY.LinuxVer || TARGET.OpSysMajorVer == MY.LinuxVerAlt || TARGET.OpSysMajorVer == MY.WinVer ) ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.175";
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        EnteredCurrentStatus = 1459367213;
+        JobLeaseDuration = 2400;
+        QDate = 1459298672;
+        AccountingGroup = EngrPhysics_Wilson;
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        Environment = "";
+        LinuxVer = 6;
+        DAGNodeName = "_mars_MH1B1_661.inp";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorVersion = "$CondorVersion: 8.5.3 Mar 14 2016 BuildID: 358989 $";
+        UserLog = "/home/grandaduarte/mars/mhb1/job697.log";
+        JobCurrentStartDate = 1459300922;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        BufferBlockSize = 32768;
+        BlockWriteKbytes = 24;
+        ExitBySignal = false;
+        DAGManJobId = 16795779;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        CumulativeSuspensionTime = 0;
+        MyType = "Job";
+        Rank = 0.0;
+        JobNotification = 0;
+        Owner = "grandaduarte";
+        LinuxVerAlt = 6;
+        Err = "job697.err";
+        PeriodicRemove = false;
+        CommittedTime = 66291;
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1459300922;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-5.chtc.wisc.edu#16798777.0#1459298672";
+        RemoteSysCpu = 6.100000000000000E+01;
+        LastRejMatchTime = 1459300921;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 6.629100000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1_7@e375.chtc.wisc.edu";
+        TransferInput = "job697.sh";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = false;
+        WinVer = 601;
+        LastPublicClaimId = "<128.105.245.175:9618>#1457031418#19008#...";
+        NumCkpts_RAW = 0;
+        Out = "job697.out";
+        SubmitEventNotes = "DAG Node: _mars_MH1B1_661.inp";
+        CumulativeSlotTime = 6.629100000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 6625678;
+        RequestDisk = 20971520;
+        ResidentSetSize_RAW = 597536;
+        OrigMaxHosts = 1;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 66289;
+        ImageSize = 750000
+    ]
\ No newline at end of file
diff --git a/asterix-external-data/src/test/resources/results/classad-with-temporals.adm b/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
new file mode 100644
index 0000000..3cd630b
--- /dev/null
+++ b/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
@@ -0,0 +1 @@
+{ "GlobalJobId": "submit-5.chtc.wisc.edu#16798777.0#1459298672", "Owner": "grandaduarte", "ClusterId": 16798777i32, "ProcId": 0i32, "RemoteWallClockTime": duration("PT18H24M51S"), "CompletionDate": datetime("2016-03-30T19:46:53.000Z"), "QDate": datetime("2016-03-30T00:44:32.000Z"), "JobCurrentStartDate": datetime("2016-03-30T01:22:02.000Z"), "JobStartDate": datetime("2016-03-30T01:22:02.000Z"), "JobCurrentStartExecutingDate": datetime("2016-03-30T01:22:04.000Z"), "StatsLifetimeStarter": 66289, "SubmitEventNotes": "DAG Node: _mars_MH1B1_661.inp", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.175", "OnExitRemove": true, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 750000, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 6625678, "EnteredCurrentStatus": 1459367213, "ResidentSetSize_RAW": 597536, "RequestDisk": 20971520, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/grandaduarte/mars/mhb1/job697.sh", "CondorVersion": "$CondorVersion: 8.5.3 Mar 14 2016 BuildID: 358989 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "WinVer": 601, "RemoteUserCpu": 66071.0d, "BlockWrites": 3, "NiceUser": false, "Out": "job697.out", "ImageSize_RAW": 607024, "BytesSent": 8.5805472E7d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "job697.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1459367213, "ImageSize": 750000, "Schedd": "submit-5.chtc.wisc.edu", "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "job697_results.tar.gz", "BlockWriteKbytes": 24, "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "LastMatchTime": 1459300922, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 12288, 
"NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 16795779, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "AccountingGroup": "EngrPhysics_Wilson", "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 66291, "ExecutableSize_RAW": 4, "LastRejMatchReason": "no match found ", "LastSuspensionTime": 0, "UserLog": "/home/grandaduarte/mars/mhb1/job697.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 66291.0d, "LastJobLeaseRenewal": 1459367213, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 7292.0d, "CondorPlatform": "$CondorPlatform: x86_64_RedHat6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "_mars_MH1B1_661.inp", "PeriodicRelease": false, "JobRunCount": 1, "LastRemoteHost": "slot1_7@e375.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 4, "RemoteSysCpu": 61.0d, "TransferInput": "job697.sh", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/grandaduarte/mars/mhb1/./dagman.dag.nodes.log", "Requirements": "( MY.JobUniverse == 12 || MY.JobUniverse == 7 || ( ( MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" ) && ( TARGET.OpSysMajorVer == MY.LinuxVer || TARGET.OpSysMajorVer == MY.LinuxVerAlt || TARGET.OpSysMajorVer == MY.WinVer ) ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "LinuxVerAlt": 6, "DiskUsage": 7500000, "LinuxVer": 
6, "LastRejMatchTime": 1459300921, "JobLeaseDuration": 2400, "BufferSize": 524288, "IsCHTCSubmit": true, "JobCurrentStartTransferOutputDate": 1459367212, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 66291.0d, "Environment": "", "LastPublicClaimId": "<128.105.245.175:9618>#1457031418#19008#...", "Iwd": "/home/grandaduarte/mars/mhb1", "CurrentHosts": 0, "Arguments": "", "User": "grandaduarte@chtc.wisc.edu", "StreamOut": false }