#!/usr/bin/env bash
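#
# Backs up one node's share of an Asterix instance, either to HDFS or to a
# directory on the local file system, depending on BACKUP_TYPE.
#
# Hypothetical invocation (the script name and argument order are inferred
# from the positional assignments below; they are not spelled out in the
# original source):
#   backup.sh <working dir> <instance name> <iodevices (comma-separated)> \
#             <node store> <root metadata dir> <txn log dir name> \
#             <backup id> <backup dir> <backup type> <node id> \
#             [<hdfs url> <hadoop version>]
#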
WORKING_DIR=$1
ASTERIX_INSTANCE_NAME=$2
ASTERIX_IODEVICES=$3
NODE_STORE=$4
ASTERIX_ROOT_METADATA_DIR=$5
TXN_LOG_DIR_NAME=$6
BACKUP_ID=$7
BACKUP_DIR=$8
BACKUP_TYPE=$9
NODE_ID=${10}
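# Minimal argument guard (an added sketch, not part of the original script):
# the local path reads 10 positional arguments and the hdfs path reads 12.
if [ $# -lt 10 ];
then
    echo "usage: $0 WORKING_DIR INSTANCE_NAME IODEVICES NODE_STORE ROOT_METADATA_DIR TXN_LOG_DIR_NAME BACKUP_ID BACKUP_DIR BACKUP_TYPE NODE_ID [HDFS_URL HADOOP_VERSION]" >&2
    exit 1
fi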
nodeIODevices=$(echo $ASTERIX_IODEVICES | tr "," "\n")
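
# Illustration (hypothetical values): with ASTERIX_IODEVICES="/mnt/io1,/mnt/io2",
# nodeIODevices holds one device path per line, so the loops below visit
# /mnt/io1 first; the first (primary) iodevice is the one that also holds
# the root metadata directory and the transaction log.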

if [ "$BACKUP_TYPE" = "hdfs" ];
then
    HDFS_URL=${11}
    HADOOP_VERSION=${12}
    # an extracted Hadoop distribution is expected under the working
    # directory, at $WORKING_DIR/hadoop-$HADOOP_VERSION
    export HADOOP_HOME=$WORKING_DIR/hadoop-$HADOOP_VERSION
    index=1
    for nodeIODevice in $nodeIODevices
    do
        STORE_DIR=$nodeIODevice/$NODE_STORE
        TXN_LOG_DIR=$nodeIODevice/$TXN_LOG_DIR_NAME
        NODE_BACKUP_DIR=$BACKUP_DIR/$ASTERIX_INSTANCE_NAME/$BACKUP_ID/$NODE_ID

        # make the destination directory
        $HADOOP_HOME/bin/hadoop fs -mkdir $HDFS_URL/$NODE_BACKUP_DIR

        # copy store directory
        $HADOOP_HOME/bin/hadoop fs -copyFromLocal $STORE_DIR $HDFS_URL/$NODE_BACKUP_DIR/

        # copy asterix root metadata directory and log directory from the primary (first) iodevice
        if [ $index -eq 1 ];
        then
            # copy asterix root metadata directory
            $HADOOP_HOME/bin/hadoop fs -copyFromLocal $nodeIODevice/$ASTERIX_ROOT_METADATA_DIR $HDFS_URL/$NODE_BACKUP_DIR/

            # copy log directory
            $HADOOP_HOME/bin/hadoop fs -copyFromLocal $TXN_LOG_DIR $HDFS_URL/$NODE_BACKUP_DIR/
        fi

        index=$((index + 1))
    done
else
    index=1
    for nodeIODevice in $nodeIODevices
    do
        STORE_DIR=$nodeIODevice/$NODE_STORE
        TXN_LOG_DIR=$nodeIODevice/$TXN_LOG_DIR_NAME
        NODE_BACKUP_DIR=$BACKUP_DIR/$ASTERIX_INSTANCE_NAME/$BACKUP_ID/$NODE_ID

        # create the backup directory, if it does not exist
        if [ ! -d $NODE_BACKUP_DIR ];
        then
            mkdir -p $NODE_BACKUP_DIR
        fi

        # copy store directory
        cp -r $STORE_DIR $NODE_BACKUP_DIR/

        # copy asterix root metadata directory and log directory from the primary (first) iodevice
        if [ $index -eq 1 ];
        then
            # copy asterix root metadata directory
            cp -r $nodeIODevice/$ASTERIX_ROOT_METADATA_DIR $NODE_BACKUP_DIR/

            # copy log directory
            cp -r $TXN_LOG_DIR $NODE_BACKUP_DIR/
        fi

        index=$((index + 1))
    done
fi
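
# Note (added, not part of the original script): per-command failures inside
# the loops are not checked, so the exit status reflects only the last
# command; a stricter variant could enable `set -e` near the top of the file.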