diff --git a/chipathlon/conf.py b/chipathlon/conf.py
index 5a99fd58409245522d11dcf31023474f1f57e716..bf2e1ac13f45b2020a7573fcb3e5009df9925ccb 100644
--- a/chipathlon/conf.py
+++ b/chipathlon/conf.py
@@ -37,7 +37,10 @@ peak_tools = [
     "gem",
     "peakranger",
     "ccat",
-    "music"
+    "music",
+    "zerone",
+    "hiddendomains",
+    "pepr"
 ]
 
 executables = [
@@ -52,6 +55,9 @@ executables = [
     "peakranger",
     "MUSIC",
     "CCAT",
+    "PePr",
+    "hiddenDomains",
+    "zerone",
     "run_spp_nodups",
     "chip-job-cat-peak",
     "chip-job-ccat-format-bed",
@@ -62,7 +68,9 @@ executables = [
     "chip-job-music",
     "chip-job-peakranger-format",
     "chip-job-sort-peak",
-    "chip-job-zcat-peak"
+    "chip-job-zcat-peak",
+    "chip-job-zerone-format",
+    "chip-job-hd-format"
 ]
 
 # Java needs to have -Xmx specified...
@@ -77,7 +85,10 @@ peak_types = {
     "gem": ["narrow"],
     "peakranger": ["narrow"],
     "ccat": ["broad"],
-    "music": ["narrow", "punctate", "broad"]
+    "music": ["narrow", "punctate", "broad"],
+    "zerone": ["broad"],
+    "hiddendomains": ["broad"],
+    "pepr": ["broad", "sharp"],
 }
 
 # File extensions
@@ -147,8 +158,8 @@ argument_types = {
 
 # Defines information about arguments
 argument_keys = {
-    "required": ["type", "changeable", "has_value", "required"],
-    "optional": ["default", "path", "file_type"]
+    "required": ["type", "changeable", "has_value"],
+    "optional": ["required", "default", "file_type", "path", "separator"]
 }
 
 # workflow_job keys
diff --git a/chipathlon/db.py b/chipathlon/db.py
index 4903f77e0b7f3740493096a6b02b27f999622f6c..427aa6225f6620f45f9861c38d266b8106dee5f6 100644
--- a/chipathlon/db.py
+++ b/chipathlon/db.py
@@ -18,7 +18,7 @@ def download_from_gridfs(host, gridfs_id, local_path, username=None, password=No
     if not os.path.isfile(local_path) or overwrite:
         for i in range(0, retries):
             print "Attempt #%s, downloading file with ID '%s' to '%s'" % (i + 1, gridfs_id, local_path)
-            if mdb.fetch_from_gridfs(bson.objectid.ObjectId(gridfs_id), localpath, checkmd5):
+            if mdb.fetch_from_gridfs(bson.objectid.ObjectId(gridfs_id), local_path, checkmd5):
                 return True
             else:
                 print "Download attempt #%s from GridFS failed, retrying..." % (i + 1)
diff --git a/chipathlon/generators/idr_generator.py b/chipathlon/generators/idr_generator.py
index 4fb0dc00a6a337db9368235587695ea78cd829ae..3303dcb9715c9f2346a26d46cf761f40e3396413 100644
--- a/chipathlon/generators/idr_generator.py
+++ b/chipathlon/generators/idr_generator.py
@@ -1,5 +1,6 @@
 from module_generator import ModuleGenerator
 from chipathlon.result import Result
+import collections
 
 class IdrGenerator(ModuleGenerator):
     """
@@ -36,14 +37,8 @@ class IdrGenerator(ModuleGenerator):
         )
         self.module_name = "idr"
         self.result_dict = {}
-        self.output_files = {
-            "peakranger": {},
-            "ccat": {},
-            "gem": {},
-            "spp": {},
-            "macs2": {},
-            "music": {}
-        }
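+        # defaultdict(dict) avoids pre-declaring a key for every peak tool;
+        # each supported tool simply registers its output files below.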
+        self.output_files = collections.defaultdict(dict)
+
         self.output_files["peakranger"]["narrow"] = ["region_sorted.bed", "summit_sorted.bed"]
         self.output_files["ccat"]["broad"] = ["region_sorted.bed", "peak_sorted.bed"]
         self.output_files["gem"]["narrow"] = ["results_GEM_sorted.bed", "results_GPS_sorted.bed"]
@@ -52,6 +47,10 @@ class IdrGenerator(ModuleGenerator):
         self.output_files["music"]["narrow"] = ["sorted_scale_%s_all.bed" % (i,) for i in [129, 194, 291]]
         self.output_files["music"]["punctate"] = ["sorted_scale_%s_all.bed" % (i,) for i in [129, 194, 291, 437, 656, 985, 1477, 2216]]
         self.output_files["music"]["broad"] = ["sorted_scale_%s_all.bed" % (i,) for i in [1459, 2189, 3284, 4926, 7389, 11084, 16626]]
+        self.output_files["zerone"]["broad"] = ["results_sorted.bed"]
+        self.output_files["hiddendomains"]["broad"] = ["results_sorted.bed"]
+        self.output_files["pepr"]["broad"] = ["results_sorted.bed"]
+        self.output_files["pepr"]["sharp"] = ["results_sorted.bed"]
         if debug:
             print "[LOADING GENERATOR] IdrGenerator"
         return
diff --git a/chipathlon/generators/peak_call_generator.py b/chipathlon/generators/peak_call_generator.py
index 2c0a6f93831abfe078f419e7de978967b8eec562..aefae932e4d57f52a5262abcb19401fc85ef26c9 100644
--- a/chipathlon/generators/peak_call_generator.py
+++ b/chipathlon/generators/peak_call_generator.py
@@ -41,7 +41,10 @@ class PeakCallGenerator(ModuleGenerator):
             "macs2": self._macs2,
             "ccat": self._ccat,
             "peakranger": self._peakranger,
-            "music": self._music
+            "music": self._music,
+            "zerone": self._zerone,
+            "hiddendomains": self._hiddendomains,
+            "pepr": self._pepr
         }
         self.call_pairs = {}
         if debug:
@@ -146,6 +149,52 @@ class PeakCallGenerator(ModuleGenerator):
         }
         return (self.get_markers(run), inputs)
 
+    def _zerone(self, run, result):
+        """
+        :param run: The run to generate jobs for.
+        :type run: :py:class:`~chipathlon.run.Run`
+        :param result: The result to generate jobs for.
+        :type result: :py:class:`~chipathlon.result.Result`
+        """
+        call_pair = self.call_pairs[result.full_name]
+        inputs = {
+            "control.bam": call_pair[0].full_name,
+            "signal.bam": call_pair[1].full_name
+        }
+        return (self.get_markers(run), inputs)
+
+    def _hiddendomains(self, run, result):
+        """
+        :param run: The run to generate jobs for.
+        :type run: :py:class:`~chipathlon.run.Run`
+        :param result: The result to generate jobs for.
+        :type result: :py:class:`~chipathlon.result.Result`
+        """
+        call_pair = self.call_pairs[result.full_name]
+        inputs = {
+            "chrom.sizes": run.genome.get_chrom_sizes()["name"],
+            "control.bed": call_pair[0].full_name,
+            "signal.bed": call_pair[1].full_name,
+            "prefix": result.prefix
+        }
+        return (self.get_markers(run), inputs)
+
+    def _pepr(self, run, result):
+        """
+        :param run: The run to generate jobs for.
+        :type run: :py:class:`~chipathlon.run.Run`
+        :param result: The result to generate jobs for.
+        :type result: :py:class:`~chipathlon.result.Result`
+        """
+        call_pair = self.call_pairs[result.full_name]
+        inputs = {
+            "control.bed": call_pair[0].full_name,
+            "signal.bed": call_pair[1].full_name,
+            "prefix": result.prefix,
+            "peak_type": run.peak_type
+        }
+        return (self.get_markers(run), inputs)
+
     def _make_call_pairs(self, run, result_list):
         """
         :param run: The run currently being processed.
@@ -185,12 +234,23 @@ class PeakCallGenerator(ModuleGenerator):
         :param run: The target run to generate jobs for.
         :type run: :py:class:`~chipathlon.run.Run`
         """
-        remove_duplicates_results = run.get_results("remove_duplicates", "no_dups_chr.bed")
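+        # Zerone consumes aligned BAM files directly; all other peak
+        # callers start from the deduplicated BED files.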
+        if run.peak == "zerone":
+            if run.file_type == "fastq":
+                results = run.get_results("align", "align.bam")
+            elif run.file_type == "bam":
+                results = run.get_results("download", "encode.bam")
+        else:
+            results = run.get_results("remove_duplicates", "no_dups_chr.bed")
+
         module_markers = {"peak_call": self.get_markers(run)}
 
         all_result_names = []
         final_results = self.module.get_all_final_results(self.get_markers(run))
-        for paired_result in self._make_call_pairs(run, remove_duplicates_results):
+        for paired_result in self._make_call_pairs(run, results):
             for i, final_result in enumerate(final_results):
                 final_result_name = final_result["file_name"]
                 if final_result_name not in all_result_names:
@@ -218,11 +278,19 @@ class PeakCallGenerator(ModuleGenerator):
         :param result: The target result to create jobs for.
         :type result: :py:class:`~chipathlon.result.Result`
         """
-        remove_duplicate_results = run.get_results("remove_duplicates", "no_dups_chr.bed")
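+        # Zerone consumes aligned BAM files directly; all other peak
+        # callers start from the deduplicated BED files.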
+        if run.peak == "zerone":
+            if run.file_type == "fastq":
+                results = run.get_results("align", "align.bam")
+            elif run.file_type == "bam":
+                results = run.get_results("download", "encode.bam")
+        else:
+            results = run.get_results("remove_duplicates", "no_dups_chr.bed")
+
         prev_results = []
         control_accessions = result.get_accessions("control")
         signal_accessions = result.get_accessions("signal")
-        for prev_result in remove_duplicate_results:
+        for prev_result in results:
             if (set(prev_result.get_accessions("control")).issubset(control_accessions) and
                 set(prev_result.get_accessions("signal")).issubset(signal_accessions)):
                 prev_results.append(prev_result)
diff --git a/chipathlon/jobs/modules/peak_call.yaml b/chipathlon/jobs/modules/peak_call.yaml
index 3f5dc998aec5b1beba36a583cfc5b948997e9d3e..04206f58bba56c916e754962d63c2084cbe3b659 100644
--- a/chipathlon/jobs/modules/peak_call.yaml
+++ b/chipathlon/jobs/modules/peak_call.yaml
@@ -307,3 +307,104 @@ peak_call:
             results_sorted.bed:
               param_name: sorted_peaks
               final_result: true
+  - zerone[tool]:
+    - broad[peak_type]:
+      - zerone_callpeak:
+          inputs:
+            control.bam:
+              param_name: control.bam
+            signal.bam:
+              param_name: signal.bam
+          outputs:
+            peaks.bed:
+              param_name: result_peaks
+      - zerone_format:
+          inputs:
+            peaks.bed:
+              param_name: result_peaks
+          outputs:
+            results_sorted.bed:
+              param_name: full_result
+              final_result: true
+  - hiddendomains[tool]:
+    - broad[peak_type]:
+      - hiddendomains_callpeak:
+          inputs:
+            control.bed:
+              param_name: control.bed
+            signal.bed:
+              param_name: signal.bed
+            chrom.sizes:
+              param_name: chrom_sizes
+            prefix:
+              param_name: prefix
+          outputs:
+            analysis.bed:
+              param_name: result_peaks
+            vis.bed:
+              param_name: enriched_bins
+            domains.txt:
+              param_name: domains
+            control_bins.txt:
+              param_name: control_bins
+            treatment_bins.txt:
+              param_name: treatment_bins
+      - hiddendomains_format:
+          inputs:
+            analysis.bed:
+              param_name: result_peaks
+          outputs:
+            results_sorted.bed:
+              param_name: full_result
+              final_result: true
+  - pepr[tool]:
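+    # PePr expects replicate files for each group; the cp jobs below
+    # duplicate the single control / signal bed so that two "replicates"
+    # can be passed to pepr_callpeak.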
+    - sharp[peak_type]:
+      - cp:
+          inputs:
+            control.bed:
+              param_name: input_file
+          outputs:
+            control2.bed:
+              param_name: output_file
+    - broad[peak_type]:
+      - cp:
+          inputs:
+            control.bed:
+              param_name: input_file
+          outputs:
+            control2.bed:
+              param_name: output_file
+    - cp:
+        inputs:
+          signal.bed:
+            param_name: input_file
+        outputs:
+          signal2.bed:
+            param_name: output_file
+    - pepr_callpeak:
+        inputs:
+          control.bed:
+            param_name: control1.bed
+          control2.bed:
+            param_name: control2.bed
+          signal.bed:
+            param_name: signal1.bed
+          signal2.bed:
+            param_name: signal2.bed
+          prefix:
+            param_name: prefix
+          peak_type:
+            param_name: peak_type
+        outputs:
+          _PePr_peaks.bed:
+            param_name: result_peaks
+          parameters.txt:
+            param_name: pepr_params
+    - sort_awk_sort_peaks:
+        inputs:
+          _PePr_peaks.bed:
+            param_name: result_peaks
+        outputs:
+          results_sorted.bed:
+            param_name: sorted_peaks
+            final_result: true
diff --git a/chipathlon/jobs/params/hiddendomains_callpeak.yaml b/chipathlon/jobs/params/hiddendomains_callpeak.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..30b50d575a28caf4076d964b12002d7ae8d6123c
--- /dev/null
+++ b/chipathlon/jobs/params/hiddendomains_callpeak.yaml
@@ -0,0 +1,82 @@
+hiddendomains_callpeak:
+  inputs:
+    control.bed:
+      type: file
+      file_type: bed
+    signal.bed:
+      type: file
+      file_type: bed
+    chrom_sizes:
+      type: file
+      file_type: chrom_sizes
+    prefix:
+      type: string
+  outputs:
+    result_peaks:
+      type: file
+      file_type: bed
+    enriched_bins:
+      type: file
+      file_type: bed
+    domains:
+      type: file
+      file_type: txt
+    control_bins:
+      type: file
+      file_type: txt
+    treatment_bins:
+      type: file
+      file_type: txt
+  command: hiddenDomains
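+  # With the defaults below, the assembled command looks roughly like:
+  #   hiddenDomains -B -c control.bed -t signal.bed -g chrom.sizes -o prefix -b 1000 -p 0 -q 30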
+  arguments:
+    - "-B":
+        type: string
+        changeable: false
+        required: true
+        has_value: false
+    - "-c":
+        type: file
+        changeable: false
+        required: true
+        has_value: true
+        default: "$control.bed"
+    - "-t":
+        type: file
+        changeable: false
+        required: true
+        has_value: true
+        default: "$signal.bed"
+    - "-g":
+        type: file
+        changeable: false
+        required: true
+        has_value: true
+        default: "$chrom_sizes"
+    - "-o":
+        type: string
+        changeable: false
+        required: true
+        has_value: true
+        default: "$prefix"
+    - "-b":
+        type: numeric
+        changeable: true
+        required: true
+        has_value: true
+        default: 1000
+    - "-p":
+        type: numeric
+        changeable: true
+        required: true
+        has_value: true
+        default: 0
+    - "-q":
+        type: numeric
+        changeable: true
+        required: true
+        has_value: true
+        default: 30
+  walltime: 240
+  memory: 16000
+  cores: 1
+  nodes: 1
diff --git a/chipathlon/jobs/params/hiddendomains_format.yaml b/chipathlon/jobs/params/hiddendomains_format.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e230db2a40b43e415bb2bb4b06fa3142cc49dc88
--- /dev/null
+++ b/chipathlon/jobs/params/hiddendomains_format.yaml
@@ -0,0 +1,25 @@
+hiddendomains_format:
+  inputs:
+    result_peaks:
+      type: file
+      file_type: bed
+  outputs:
+    full_result:
+      type: file
+      file_type: bed
+  command: chip-job-hd-format
+  arguments:
+    - "$result_peaks":
+        type: file
+        changeable: false
+        required: true
+        has_value: false
+    - "$full_result":
+        type: file
+        changeable: false
+        required: true
+        has_value: false
+  walltime: 2000
+  memory: 2000
+  cores: 1
+  nodes: 1
diff --git a/chipathlon/jobs/params/pepr_callpeak.yaml b/chipathlon/jobs/params/pepr_callpeak.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8b7b742cf7134988eff08311bf25d7e07efd2394
--- /dev/null
+++ b/chipathlon/jobs/params/pepr_callpeak.yaml
@@ -0,0 +1,100 @@
+pepr_callpeak:
+  inputs:
+    control1.bed:
+      type: file
+      file_type: bed
+    control2.bed:
+      type: file
+      file_type: bed
+    signal1.bed:
+      type: file
+      file_type: bed
+    signal2.bed:
+      type: file
+      file_type: bed
+    prefix:
+      type: string
+    peak_type:
+      type: string
+  outputs:
+    result_peaks:
+      type: file
+      file_type: bed
+    pepr_params:
+      type: file
+      file_type: txt
+  command: PePr
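+  # With the defaults below, the assembled command looks roughly like:
+  #   PePr -c control1.bed,control2.bed -i signal1.bed,signal2.bed -n prefix -f bed
+  #        --threshold 1e-5 --peaktype <peak_type> --normalization intra-group --num-processors 1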
+  arguments:
+    - "-c":
+        type: file_list
+        separator: ","
+        changeable: false
+        required: true
+        has_value: true
+        default:
+          - $control1.bed
+          - $control2.bed
+    - "-i":
+        type: file_list
+        separator: ","
+        changeable: false
+        required: true
+        has_value: true
+        default:
+          - $signal1.bed
+          - $signal2.bed
+    - "-n":
+        type: string
+        changeable: true
+        required: true
+        has_value: true
+        default: $prefix
+    - "-f":
+        type: string
+        changeable: true
+        required: true
+        has_value: true
+        default: "bed"
+    - "-s":
+        type: numeric
+        changeable: true
+        required: false
+        has_value: true
+    - "-w":
+        type: numeric
+        changeable: true
+        required: false
+        has_value: true
+    - "--threshold":
+        type: string
+        changeable: true
+        required: false
+        has_value: true
+        default: "1e-5"
+    - "--peaktype":
+        type: string
+        changeable: true
+        required: true
+        has_value: true
+        default: $peak_type
+    - "--normalization":
+        type: string
+        changeable: true
+        required: false
+        has_value: true
+        default: "intra-group"
+    - "--keep-max-dup":
+        type: numeric
+        changeable: true
+        required: false
+        has_value: true
+    - "--num-processors":
+        type: numeric
+        changeable: true
+        required: false
+        has_value: true
+        default: 1
+  walltime: 120
+  memory: 16000
+  cores: 1
+  nodes: 1
diff --git a/chipathlon/jobs/params/zerone_callpeak.yaml b/chipathlon/jobs/params/zerone_callpeak.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..352bfc21bae5de4898888c79695aa3a43542d674
--- /dev/null
+++ b/chipathlon/jobs/params/zerone_callpeak.yaml
@@ -0,0 +1,42 @@
+zerone_callpeak:
+  inputs:
+    control.bam:
+      type: file
+      file_type: bam
+    signal.bam:
+      type: file
+      file_type: bam
+  outputs:
+    result_peaks:
+      type: stdout
+      file_type: bed
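+      # zerone writes its peak calls to stdout (hence type: stdout); the
+      # output is captured into the mapped result file, roughly:
+      #   zerone --mock control.bam --chip signal.bam --window 300 --quality 20 > peaks.bed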
+  command: zerone
+  arguments:
+    - "--mock":
+        type: file
+        changeable: false
+        required: true
+        has_value: true
+        default: $control.bam
+    - "--chip":
+        type: file
+        changeable: false
+        required: true
+        has_value: true
+        default: $signal.bam
+    - "--window":
+        type: numeric
+        changeable: true
+        required: true
+        has_value: true
+        default: 300
+    - "--quality":
+        type: numeric
+        changeable: true
+        required: true
+        has_value: true
+        default: 20
+  walltime: 120
+  memory: 16000
+  cores: 1
+  nodes: 1
diff --git a/chipathlon/jobs/params/zerone_format.yaml b/chipathlon/jobs/params/zerone_format.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ae8f71fbe3e8b0c26a679ae8c8fe8e2325ead47d
--- /dev/null
+++ b/chipathlon/jobs/params/zerone_format.yaml
@@ -0,0 +1,25 @@
+zerone_format:
+  inputs:
+    result_peaks:
+      type: file
+      file_type: bed
+  outputs:
+    full_result:
+      type: file
+      file_type: bed
+  command: chip-job-zerone-format
+  arguments:
+    - "$result_peaks":
+        type: file
+        changeable: false
+        required: true
+        has_value: false
+    - "$full_result":
+        type: file
+        changeable: false
+        required: true
+        has_value: false
+  walltime: 2000
+  memory: 2000
+  cores: 1
+  nodes: 1
diff --git a/chipathlon/workflow.py b/chipathlon/workflow.py
index d29bf080b7d716a5b62587dc1a8cf29e2e4839b7..161f562e2ceddc7cd2c0d50417fdde3ee17a03b9 100644
--- a/chipathlon/workflow.py
+++ b/chipathlon/workflow.py
@@ -35,11 +35,11 @@ class Workflow(object):
     :type username: str
     :param password: The password to authenticate for MongoDB access.
     :type password: str
-    :param execute_site: A list of sites to submit jobs to.  These sites should
-        be defined in the configuration file.
-    :type execute_site: list
+    :param execute_site: The target site to submit jobs to.  This site should
+        be defined in the sites.xml file.
+    :type execute_site: str
     :param output_site: The output site to transfer files to.  This site should
-        be defined in the configuration file.
+        be defined in the sites.xml file.
     :type output_site: str
     :param save_db: Whether or not we want to save results to the database.
         True by default.
diff --git a/chipathlon/workflow_job.py b/chipathlon/workflow_job.py
index 7e2cff972588910b790b5880fdc076ae25967cf1..b56ad6644464d21fd8d64f5484ebc8765aae70a5 100644
--- a/chipathlon/workflow_job.py
+++ b/chipathlon/workflow_job.py
@@ -235,34 +235,35 @@ class WorkflowJob(object):
         the size of the list.
         """
         arg_value = self._get_arg_value(arg_name, arg_info)
-        if arg_info["type"] == "numeric":
-            # We need this as two seperate if statements otherwise we try
-            # to interpolate numeric arguments
-            if not is_number(arg_value):
-                return (False, "[Error parsing job %s]: Argument '%s' with value '%s' must be numeric." % (self, arg_name, arg_value))
-        elif arg_info["type"] in chipathlon.conf.argument_types["file"]:
-            # If the argument is a rawfile, validate it's extension & existance
-            if arg_info["type"] == "rawfile":
-                if os.path.isfile(arg_value):
-                    if not arg_value.endswith(tuple(chipathlon.conf.file_extensions[arg_info["file_type"]])):
-                        return (False, "[Error parsing job %s] Argument '%s' with file path '%s' is not of file type '%s'. \
-                                    Should match one of these extensions: %s." % (
-                                        self,
-                                        arg_name,
-                                        arg_value,
-                                        arg_info["file_type"],
-                                        chipathlon.conf.file_extensions[arg_info["file_type"]]
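+        # Optional arguments without a default now resolve to None;
+        # only arguments that actually have a value are validated.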
+        if arg_value is not None:
+            if arg_info["type"] == "numeric":
+                # We need this as two separate if statements otherwise we try
+                # to interpolate numeric arguments
+                if not is_number(arg_value):
+                    return (False, "[Error parsing job %s]: Argument '%s' with value '%s' must be numeric." % (self, arg_name, arg_value))
+            elif arg_info["type"] in chipathlon.conf.argument_types["file"]:
+                # If the argument is a rawfile, validate its extension & existence
+                if arg_info["type"] == "rawfile":
+                    if os.path.isfile(arg_value):
+                        if not arg_value.endswith(tuple(chipathlon.conf.file_extensions[arg_info["file_type"]])):
+                            return (False, "[Error parsing job %s] Argument '%s' with file path '%s' is not of file type '%s'. \
+                                        Should match one of these extensions: %s." % (
+                                            self,
+                                            arg_name,
+                                            arg_value,
+                                            arg_info["file_type"],
+                                            chipathlon.conf.file_extensions[arg_info["file_type"]]
+                                        )
                                     )
-                                )
-                else:
-                    return (False, "[Error parsing job %s]: Argument '%s' is a rawfile, however the specified path '%s' does not exist. " % (self, arg_name, arg_value))
-            # If the argument is a 'regular' file, we need to make sure that it
-            # references one of the keys of the inputs / outputs
-            elif not arg_value.startswith("$"):
-                return (False, "[Error parsing job %s]: Argument '%s' has value '%s'.  File references must start with a '$'." % (self, arg_name, arg_value))
-        elif isinstance(arg_value, str) and arg_value.startswith("$"):
-            if not any([str(arg_value)[1:] == ref for ref in (self.valid_inputs + self.valid_outputs)]):
-                return (False, "[Error parsing job %s]: Argument '%s' has reference '%s'.  No such input / output exists." % (self, arg_name, arg_value))
+                    else:
+                        return (False, "[Error parsing job %s]: Argument '%s' is a rawfile, however the specified path '%s' does not exist. " % (self, arg_name, arg_value))
+                # If the argument is a 'regular' file, we need to make sure that it
+                # references one of the keys of the inputs / outputs
+                elif not arg_value.startswith("$"):
+                    return (False, "[Error parsing job %s]: Argument '%s' has value '%s'.  File references must start with a '$'." % (self, arg_name, arg_value))
+            elif isinstance(arg_value, str) and arg_value.startswith("$"):
+                if not any([str(arg_value)[1:] == ref for ref in (self.valid_inputs + self.valid_outputs)]):
+                    return (False, "[Error parsing job %s]: Argument '%s' has reference '%s'.  No such input / output exists." % (self, arg_name, arg_value))
         return (True, None)
 
     def _get_arg_value(self, arg_name, arg_info):
@@ -285,10 +286,10 @@ class WorkflowJob(object):
             elif "path" in arg_info:
                 return self._get_path(arg_info["path"]) + arg_info["default"]
             else:
-                return arg_info["default"]
+                return arg_info.get("default")
         else:
             if arg_info["has_value"]:
-                return arg_info["default"]
+                return arg_info.get("default")
             elif "path" in arg_info:
                 return self._get_path(arg_info["path"]) + arg_name
             else:
@@ -512,16 +513,13 @@ class WorkflowJob(object):
         passed in.
         """
         arg_value = self._get_arg_value(arg_name, arg_info)
+        if self.debug:
+            print "arg_value: %s" % (arg_value,),
         add_value = arg_value
         if (isinstance(arg_value, str) and arg_value.startswith("$")) or arg_info["type"] == "rawfile":
             arg_value = arg_value[1:]
             if arg_info["type"] == "rawfile":
                 add_value = self.raw_files[os.path.basename(arg_value)]["file"]
-            elif arg_info["type"] == "file_list":
-                # Lists can only be loaded from inputs
-                add_value = []
-                for file_dict in inputs[arg_value]["values"]:
-                    add_value.append(file_dict["file"].name)
             else:
                 # Conditionally load from inputs / outputs
                 # This will only load the dict of information though, not
@@ -538,6 +536,11 @@ class WorkflowJob(object):
         elif arg_info["type"] == "rawfolder":
             # We want just the folder name
             add_value = os.path.basename(os.path.dirname(arg_value + "/"))
+        elif arg_info["type"] == "file_list":
+            add_value = []
+            for val in arg_value:
+                file_name = val[1:]
+                add_value.append((inputs if file_name in inputs else outputs)[file_name]["file"].name)
         return add_value
 
     def _create_arg_list(self, inputs, outputs):
@@ -566,9 +569,11 @@ class WorkflowJob(object):
             # Need to figure out 2 things:
             # 1. Should we add the argument name?
             # 2. What's the correct value to add?
+            if self.debug:
+                print "\t%s: Loading argument: %s, info: %s, " % (self, arg_name, arg_info),
             add_value = self._interpolate_value(inputs, outputs, arg_name, arg_info)
             if self.debug:
-                print "%s: Loading argument: %s, info: %s, value: %s" % (self, arg_name, arg_info, add_value)
+                print "Final value: %s" % (add_value)
             # Only add arguments that have a value
             if add_value is not None:
                 # Need to add in the arg_name and the arg_value
@@ -580,8 +585,11 @@ class WorkflowJob(object):
                     # If it's a list we want to add each of the Pegasus.File instances
                     elif isinstance(add_value, list):
                         arg_list.append(arg_name)
-                        for f in add_value:
-                            arg_list.append(f)
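+                        # With a separator, the list renders as a single token,
+                        # e.g. "-c" with ["a.bed", "b.bed"] and "," -> "-c a.bed,b.bed"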
+                        if "separator" in arg_info:
+                            arg_list.append(arg_info["separator"].join(add_value))
+                        else:
+                            for f in add_value:
+                                arg_list.append(f)
                     # Otherwise, add stuff as a string
                     else:
                         arg_list.append("%s %s" % (arg_name, add_value))
diff --git a/doc/source/examples.rst b/doc/source/examples.rst
index 6ca1d13809cddb3e9bfd1b932844abc0588c20ea..a46a89699af9de708fe685e9b575ead8b0651759 100644
--- a/doc/source/examples.rst
+++ b/doc/source/examples.rst
@@ -1,14 +1,51 @@
 Examples
 ==========
 
-Whenever generating a workflow, there are three required files.  A config file,
-a run file, and a param file.  The config file is used to specify system
-information -- paths to required software, environment variables for pegasus
-and so on.  The run file is used to specify the actual files to process and
-what software tools to use on them.  Finally, the param file is used to
-override any default params for the jobs in the workflow.  In each of the
-examples below, all three of these files will be talked about, and download
-links to each will be provided.
+Whenever generating a workflow, there are five required files you will
+need to create:
+
+*   **Config File**
+    A few pieces of information need to be defined here: the bin path to the
+    chipathlon environment, the bin path to the idr environment, and the
+    email address to message when the workflow is complete.
+*   **Param File**
+    Allows the user to override options for many of the software tools being
+    used.  Most numeric arguments have defaults that can be changed by the
+    end-user.
+*   **Run File**
+    Describes the actual files to process, which alignment / peak calling
+    tools should be used on them, and whether or not to run idr.
+*   **Properties File**
+    One of the files required by pegasus.  For more information see their
+    `properties documentation <https://pegasus.isi.edu/documentation/properties.php>`_
+*   **Sites File**
+    One of the files required by pegasus.  For more information see their
+    `sites catalog documentation <https://pegasus.isi.edu/documentation/site.php>`_
+
+The information located in the properties file will be highly specific to
+the environment that you're submitting on.  Additionally, genomic information
+is expected to be downloaded & built for the target genome you're interested
+in, as well as a chromosome sizes file.
+
+Supported Tools
+^^^^^^^^^^^^^^^^
+
+Alignment:
+
+* `bwa <http://bio-bwa.sourceforge.net>`_
+* `bowtie2 <http://bowtie-bio.sourceforge.net/bowtie2/index.shtml>`_
+
+Peak Calling:
+
+* `spp <https://github.com/hms-dbmi/spp>`_ (narrow, broad)
+* `zerone <https://omictools.com/zerone-tool>`_ (broad)
+* `macs2 <https://github.com/taoliu/MACS>`_ (narrow, broad)
+* `gem <http://groups.csail.mit.edu/cgs/gem/>`_ (narrow)
+* `peakranger <http://ranger.sourceforge.net/manual1.18.html>`_ (narrow)
+* `ccat <http://ranger.sourceforge.net/manual1.18.html>`_ (broad)
+* `music <https://github.com/gersteinlab/MUSIC>`_ (narrow, punctate, broad)
+* `pepr <https://github.com/shawnzhangyx/PePr>`_ (sharp, broad)
+* `hiddendomains <http://hiddendomains.sourceforge.net/>`_ (broad)
 
 Getting Started
 ^^^^^^^^^^^^^^^^
@@ -18,34 +55,28 @@ Getting Started
 
 :download:`Param <examples/small_test_param.yaml>`
 
+:download:`Properties <examples/small_test_properties.txt>`
+
+:download:`Sites <examples/small_test_sites.xml>`
+
 **Config**
 
-.. code-block:: yaml
+.. code-block:: text
+
+    chipathlon_bin: /home/swanson/aknecht/.conda/envs/chip/bin
+    idr_bin: /home/swanson/aknecht/.conda/envs/idr/bin
+    pegasus_home: /usr/share/pegasus/
+    email: YOUREMAIL@DOMAIN.com
 
-    notify:
-      pegasus_home: "/usr/share/pegasus/"
-      email: "avi@kurtknecht.com"
-    profile:
-      pegasus:
-        style: "glite"
-      condor:
-        grid_resource: "pbs"
-        universe: "vanilla"
-        batch_queue: "batch"
-      env:
-        PYTHONPATH: "/home/swanson/aknecht/.conda/envs/ih_env/lib/python2.7/site-packages/"
-        PATH: "/home/swanson/aknecht/.conda/envs/ih_env/bin:/bin/:/usr/bin/:/usr/local/bin/"
-        PEGASUS_HOME: "/usr/"
-
-Specifying an email in the config file will send an email to the target
-address once the workflow is complete.  The pegasus_home definition corresponds
-to the pegasus install location.  This is necessary so the pegasus email
-script (in pegasus/notification/email) can be found and executed successfully.
-The config file profile information is passed through to the pegasus
-`sites catalog <https://pegasus.isi.edu/documentation/site.php>`_.  This allows
-any pegasus `profile <https://pegasus.isi.edu/documentation/profiles.php>`_
-information to be passed.  The required information will be dependent on the
-system you are submitting to.
+The top two lines define the bin paths to the chipathlon and idr environments.
+The paths will depend on where you created your environments, but if you
+followed the installation instructions they will be in your home directory in
+the .conda folder.  These two paths are required so that all the necessary
+software can be found.  Specifying an email in the config file will send an
+email to the target address once the workflow is complete.  The pegasus_home
+the pegasus email script (in pegasus/notification/email) can be found and
+executed successfully.
 
 **Run**
 
@@ -97,8 +128,8 @@ necessary for processing:
     Defines the type of files that processing initially begins with.  Should be
     either fastq or bam.
 * peak
-    The tool used for peak calling.  Should be one of [spp, gem, macs2,
-    peakranger, ccat, zerone, music].
+    The tool used for peak calling.  See the supported tools section above
+    for a list of all peak calling tools and their supported peak types.
 * peak_type
     The type of peak calling to perform.  The peak type is tool dependent,
     as tools support different peak calling types.  Usually peak_type is narrow
@@ -115,8 +146,10 @@ When creating runs, often times you'll want to investigate the same files
 with multiple different peak calling and alignment tools.  In the case above,
 the two runs defined are identical except for the alignment tool -- one uses
 bwa and the other uses bowtie2.  To avoid retyping a lot of information, lists
-can be marked with ids using the & symbol.  Later on in the file, the list can
-be referenced using the * symbol.
+can be marked with ids using the & symbol and a unique identifier.  Later on
+in the file, the list can be referenced using the * symbol.  Since we are
+only changing the alignment tool, there's no need to type out all the
+samples a second time, as the sketch below shows.
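+
+As a minimal illustration of this anchor syntax (the key and anchor names
+here are placeholders, not the exact run file schema):
+
+.. code-block:: yaml
+
+    - run1:
+        align: bwa
+        samples: &sample_list
+          - sample_1
+          - sample_2
+    - run2:
+        align: bowtie2
+        samples: *sample_list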
 
 **Param**
 
@@ -125,6 +158,10 @@ be referenced using the * symbol.
     macs2_callpeak:
       arguments:
         "-g": "mm"
+    bwa_align_single:
+      arguments:
+        "-l": 20
+        "-q": 6
     music_punctate:
       arguments:
         "--mapp": "/work/ladunga/SHARED/workflows/mm9_50bp"
@@ -144,6 +181,46 @@ specify the "-g": "mm" for macs2 peak calling.  The music peak caller requires
 additional information to run successfully (even though we are not using it).
 Finally, we specify not to remove duplicates.
 
+**Properties**
+
+.. code-block:: text
+
+    pegasus.catalog.site = XML
+    pegasus.catalog.site.file = small_test_sites.xml
+
+    pegasus.condor.logs.symlink = false
+    pegasus.transfer.links = true
+    pegasus.data.configuration = sharedfs
+
+Again, for more information on the properties file consult the pegasus
+`properties documentation <https://pegasus.isi.edu/documentation/properties.php>`_.
+
+**Sites**
+
+.. code-block:: xml
+
+    <?xml version="1.0" ?>
+    <sitecatalog version="4.0" xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-4.0.xsd">
+      <site arch="x86_64" handle="local" os="LINUX">
+        <directory path="/lustre/work/ladunga/SHARED/workflows/new_tests/full_test/work" type="shared-scratch">
+          <file-server operation="all" url="file:///lustre/work/ladunga/SHARED/workflows/new_tests/full_test/work"/>
+        </directory>
+        <directory path="/lustre/work/ladunga/SHARED/workflows/new_tests/full_test/output" type="local-storage">
+          <file-server operation="all" url="file:///lustre/work/ladunga/SHARED/workflows/new_tests/full_test/output"/>
+        </directory>
+
+        <profile key="change.dir" namespace="pegasus">true</profile>
+        <profile key="transfer.threads" namespace="pegasus">4</profile>
+        <profile key="universe" namespace="condor">vanilla</profile>
+        <profile key="grid_resource" namespace="condor">pbs</profile>
+        <profile key="batch_queue" namespace="condor">batch</profile>
+        <profile key="style" namespace="pegasus">glite</profile>
+      </site>
+    </sitecatalog>
+
+Again, for more information on the sites file consult the pegasus
+`sites catalog documentation <https://pegasus.isi.edu/documentation/site.php>`_.
+
 **Generation**
 
 To generate the workflow, pass these input files into the :ref:`chip-gen`
@@ -154,11 +231,12 @@ script, like so:
     chip-gen \
       --dir DIRECTORY_NAME \
       --host DB_HOST \
-      --username USERNAME \
-      --password PASSWORD \
       --param param.yaml \
       --conf config.yaml \
-      --run run.yaml
+      --run run.yaml \
+      --properties properties.txt \
+      --execute-site local \
+      --output-site local
 
 This will generate all files necessary to run the workflow in the specified
 directory under a date-time stamped folder.  The structure will look like this:
@@ -169,10 +247,8 @@ directory under a date-time stamped folder.  The structure will look like this:
         date-timestamp/
           input/
             chipathlon.dax
-            conf.rc
             db_meta/
             notify.sh
-            sites.xml
             submit.sh
           output/
           work/
diff --git a/doc/source/examples/small_test_config.yaml b/doc/source/examples/small_test_config.yaml
index 55eb7f7baed9426a33ac469a831af4f11b4b3411..75e486743e25c0f7ae17ffe7cb3e6726f05f14b9 100644
--- a/doc/source/examples/small_test_config.yaml
+++ b/doc/source/examples/small_test_config.yaml
@@ -1,14 +1,4 @@
-notify:
-  pegasus_home: "/usr/share/pegasus/"
-  email: "avi@kurtknecht.com"
-profile:
-  pegasus:
-    style: "glite"
-  condor:
-    grid_resource: "pbs"
-    universe: "vanilla"
-    batch_queue: "batch"
-  env:
-    PYTHONPATH: "/home/swanson/aknecht/.conda/envs/ih_env/lib/python2.7/site-packages/"
-    PATH: "/home/swanson/aknecht/.conda/envs/ih_env/bin:/bin/:/usr/bin/:/usr/local/bin/"
-    PEGASUS_HOME: "/usr/"
+chipathlon_bin: /home/swanson/aknecht/.conda/envs/chip/bin
+idr_bin: /home/swanson/aknecht/.conda/envs/idr/bin
+pegasus_home: /usr/share/pegasus/
+email: YOUREMAIL@DOMAIN.com
diff --git a/doc/source/examples/small_test_param.yaml b/doc/source/examples/small_test_param.yaml
index 8264fa5cc7d8a43425f1053a238434aeae7f0dcc..fee17c972cc699d27d4d200472f4c344226c68c9 100644
--- a/doc/source/examples/small_test_param.yaml
+++ b/doc/source/examples/small_test_param.yaml
@@ -1,6 +1,10 @@
 macs2_callpeak:
   arguments:
     "-g": "mm"
+bwa_align_single:
+  arguments:
+    "-l": 20
+    "-q": 6
 music_punctate:
   arguments:
     "--mapp": "/work/ladunga/SHARED/workflows/mm9_50bp"
diff --git a/doc/source/examples/small_test_properties.txt b/doc/source/examples/small_test_properties.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2bb50e0f5e84d4a00966312ab5a9a870677c065b
--- /dev/null
+++ b/doc/source/examples/small_test_properties.txt
@@ -0,0 +1,6 @@
+pegasus.catalog.site = XML
+pegasus.catalog.site.file = small_test_sites.xml
+
+pegasus.condor.logs.symlink = false
+pegasus.transfer.links = true
+pegasus.data.configuration = sharedfs
diff --git a/doc/source/examples/small_test_sites.xml b/doc/source/examples/small_test_sites.xml
new file mode 100644
index 0000000000000000000000000000000000000000..000d22a5dd33961318e3ecb0a39650ff234080b9
--- /dev/null
+++ b/doc/source/examples/small_test_sites.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" ?>
+<sitecatalog version="4.0" xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-4.0.xsd">
+  <site arch="x86_64" handle="local" os="LINUX">
+    <directory path="/lustre/work/ladunga/SHARED/workflows/new_tests/full_test/work" type="shared-scratch">
+      <file-server operation="all" url="file:///lustre/work/ladunga/SHARED/workflows/new_tests/full_test/work"/>
+    </directory>
+    <directory path="/lustre/work/ladunga/SHARED/workflows/new_tests/full_test/output" type="local-storage">
+      <file-server operation="all" url="file:///lustre/work/ladunga/SHARED/workflows/new_tests/full_test/output"/>
+    </directory>
+
+    <profile key="change.dir" namespace="pegasus">true</profile>
+    <profile key="transfer.threads" namespace="pegasus">4</profile>
+    <profile key="universe" namespace="condor">vanilla</profile>
+    <profile key="grid_resource" namespace="condor">pbs</profile>
+    <profile key="batch_queue" namespace="condor">batch</profile>
+    <profile key="style" namespace="pegasus">glite</profile>
+  </site>
+</sitecatalog>
diff --git a/doc/source/installation.rst b/doc/source/installation.rst
index a9c57cfb17d05bdc445d8f434e9dae8ddc540505..ce8f3c7b39d9edb01af35a996c51475538557732 100644
--- a/doc/source/installation.rst
+++ b/doc/source/installation.rst
@@ -15,6 +15,8 @@ Dependencies for running workflows
 * `SPP <https://github.com/hms-dbmi/spp>`_
 * `peakranger <http://ranger.sourceforge.net/manual1.18.html>`_
 * `MUSIC <https://github.com/gersteinlab/MUSIC>`_
+* `PePr <https://github.com/shawnzhangyx/PePr>`_
+* `hiddenDomains <http://hiddendomains.sourceforge.net/>`_
 * `samtools <https://github.com/gersteinlab/MUSIC>`_
 * `bamtools <https://github.com/pezmaster31/bamtools>`_
 * `picard <https://github.com/pezmaster31/bamtools>`_
diff --git a/doc/source/introduction.rst b/doc/source/introduction.rst
index 13151d447f8fe9e9e0a5ea66cf37a3064fd5a5e5..7a2e70d4446ea53cc193c2ea9af2f4cc885d4959 100644
--- a/doc/source/introduction.rst
+++ b/doc/source/introduction.rst
@@ -10,7 +10,7 @@ steps leading up to and including peak calling can be very computationally
 expensive.  For this reason, we implemented a `Pegasus <pegasus.isi.edu>`_
 pipeline for creating and submitting workflows to super computers.
 
-Currently, there are 7 peak calling tools installed in ChIPathlon:
+Currently, there are 9 peak calling tools installed in ChIPathlon:
 
 1. `Gem <http://groups.csail.mit.edu/cgs/gem/>`_
 2. `Zerone <https://omictools.com/zerone-tool>`_
@@ -19,6 +19,8 @@ Currently, there are 7 peak calling tools installed in ChIPathlon:
 5. `peakranger <http://ranger.sourceforge.net/manual1.18.html>`_
 6. `MUSIC <https://github.com/gersteinlab/MUSIC>`_
 7. `CCAT <https://academic.oup.com/bioinformatics/article/26/9/1199/201375/A-signal-noise-model-for-significance-analysis-of>`_
+8. `PePr <https://github.com/shawnzhangyx/PePr>`_
+9. `hiddenDomains <http://hiddendomains.sourceforge.net/>`_
 
 The pipeline gives the flexibility of choosing any or all of these tools for
 running peak calling, and gives the ability to adjust parameters as necessary
diff --git a/doc/source/scripts.rst b/doc/source/scripts.rst
index f28962f3add05e40b81b3401f20d2523bf406e43..97d0833277fb3f651cbe7916a969dca2ebab948e 100644
--- a/doc/source/scripts.rst
+++ b/doc/source/scripts.rst
@@ -2,6 +2,15 @@ Scripts
 ========
 
 
+chip-create-run
+^^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-create-run
+    :func: parser
+    :prog: chip-create-run
+
+
 chip-gen
 ^^^^^^^^
 
@@ -10,26 +19,84 @@ chip-gen
     :func: parser
     :prog: chip-gen
 
-chip-create-run
-^^^^^^^^^^^^^^^
+
+chip-job-ccat-format-bed
+^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. argparse::
-    :filename: source/scripts/chip-create-run
+    :filename: source/scripts/chip-job-ccat-format-bed
     :func: parser
-    :prog: chip-create-run
+    :prog: chip-job-ccat-format-bed
+
+
+chip-job-chr-convert
+^^^^^^^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-job-chr-convert
+    :func: parser
+    :prog: chip-job-chr-convert
+
+
+chip-job-download-encode
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-job-download-encode
+    :func: parser
+    :prog: chip-job-download-encode
+
+
+chip-job-download-gridfs
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-job-download-gridfs
+    :func: parser
+    :prog: chip-job-download-gridfs
+
+
+chip-job-music
+^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-job-music
+    :func: parser
+    :prog: chip-job-music
+
+
+chip-job-peakranger-format
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-job-peakranger-format
+    :func: parser
+    :prog: chip-job-peakranger-format
+
+
+chip-job-save-result
+^^^^^^^^^^^^^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-job-save-result
+    :func: parser
+    :prog: chip-job-save-result
+
 
 chip-meta-download
-^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^
 
 .. argparse::
     :filename: source/scripts/chip-meta-download
     :func: parser
     :prog: chip-meta-download
 
+
 chip-meta-import
-^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^
 
 .. argparse::
     :filename: source/scripts/chip-meta-import
     :func: parser
     :prog: chip-meta-import
+
diff --git a/doc/source/scripts/chip-create-run b/doc/source/scripts/chip-create-run
index c48c2d33dd71102d02654ebdde70c06d65abfd47..285858c148e169394c1b757fa84f0d412e228af7 100644
--- a/doc/source/scripts/chip-create-run
+++ b/doc/source/scripts/chip-create-run
@@ -6,9 +6,10 @@ import yaml
 import pprint
 
 parser = argparse.ArgumentParser(description="Create a run file from a list of experiment accessions.")
-parser.add_argument("-H", "--host", dest="host", required=True, help="Database host.")
-parser.add_argument("-u", "--username", dest="username", required=True, help="Database user.")
-parser.add_argument("-p", "--password", dest="password", required=True, help="Database user password.")
+parser.add_argument("-H", "--host", dest="host", default="localhost", help="Database host. (default: %(default)s)")
+parser.add_argument("-u", "--username", dest="username", help="Database username (if required).")
+parser.add_argument("-p", "--password", dest="password", help="Database password (if required).")
 parser.add_argument("-n", "--name", dest="name", required=True, help="Name of the run file to create.")
 parser.add_argument("-f", "--file_type", dest="file_type", default="fastq", help="Type of files to extract (fastq or bam).")
+
 parser.add_argument("-a", "--accessions", dest="accessions", nargs="+", required=True, help="List of experiment accessions to load.")
diff --git a/doc/source/scripts/chip-gen b/doc/source/scripts/chip-gen
index d6b80eb772e2dfc49093ecf47e9e297ecfb6accf..f4c258a1ed013bb501e88fd0172e33c3af0c1f5d 100644
--- a/doc/source/scripts/chip-gen
+++ b/doc/source/scripts/chip-gen
@@ -1,17 +1,19 @@
 #!/usr/bin/env python
 from chipathlon.workflow import Workflow
 import argparse
-import yaml
-import pprint
 
 parser = argparse.ArgumentParser(description="Generates a workflow from a run, param & config file.")
-parser.add_argument("-H", "--host", dest="host", required=True, help="Database host.")
-parser.add_argument("-u", "--username", dest="username", required=True, help="Database user.")
-parser.add_argument("-p", "--password", dest="password", required=True, help="Database user password.")
+parser.add_argument("-H", "--host", dest="host", default="localhost", help="Database host. (default: %(default)s)")
+parser.add_argument("-u", "--username", dest="username", help="Database username (if required).")
+parser.add_argument("-p", "--password", dest="password", help="Database password (if required).")
 parser.add_argument("-d", "--dir", dest="dir", required=True, help="Directory name to generate files in.")
 parser.add_argument("--param", dest="param", required=True, help="Path to param file to load.")
 parser.add_argument("--conf", dest="config", required=True, help="Path to config file to load.")
 parser.add_argument("--run", dest="run", required=True, help="Path to run file to load.")
+parser.add_argument("--properties", dest="properties", required=True, help="Path to pegasus properties file.")
+parser.add_argument("--execute-site", dest="execute_site", required=True, default="local", help="Target execute site.  Sites should be defined in configuration.")
+parser.add_argument("--output-site", dest="output_site", required=True, default="local", help="Target output site.  Site should be defined in configuration.")
 
+parser.add_argument("--no-save-db", dest="save_db", default=True, action="store_false", help="Whether or not to save results to the database.  Default: True")
 parser.add_argument("--rewrite", dest="rewrite", default=False, action="store_true", help="If specified, don't load from the database, rewrite files.")
 parser.add_argument("--debug", dest="debug", default=False, action="store_true", help="Print out more information while generating.")
diff --git a/doc/source/scripts/chip-job-ccat-format-bed b/doc/source/scripts/chip-job-ccat-format-bed
new file mode 100644
index 0000000000000000000000000000000000000000..67ba7b916be3142f5a9f19b1f02ba5dd77c507a3
--- /dev/null
+++ b/doc/source/scripts/chip-job-ccat-format-bed
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+import argparse
+import os
+
+# The ccat output is *mostly* correct, however simply sorting is not enough
+# as the fourth column labels are not done in correct chromosome order
+# After sorting the output looks like this:
+# chr1    3086860 3087035 ccat_131620  4   0   5.483551    0.577000
+# chr1    3318040 3318245 ccat_131610  4   0   5.483551    0.577000
+# chr1    3372210 3372465 ccat_87299  5   0   6.854439    0.462000
+# When it should look like this:
+# chr1    3086860 3087035 ccat_0  4   0   5.483551    0.577000    -1
+# chr1    3318040 3318245 ccat_1  4   0   5.483551    0.577000    -1
+# chr1    3372210 3372465 ccat_2  5   0   6.854439    0.462000    -1
+
+parser = argparse.ArgumentParser(description = "Format ccat result files.")
+parser.add_argument("--input", "-i", dest="input", required=True, help="Path to input ccat file.")
+parser.add_argument("--output", "-o", dest="output", required=True, help="Output file to write formatted results.")
diff --git a/doc/source/scripts/chip-job-chr-convert b/doc/source/scripts/chip-job-chr-convert
new file mode 100644
index 0000000000000000000000000000000000000000..48a572538c98b70feaade8002dc7c5dafed2abb4
--- /dev/null
+++ b/doc/source/scripts/chip-job-chr-convert
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+
+import pymysql
+import argparse
+import os
+
+parser = argparse.ArgumentParser(description="Replace chromosome locus with number.")
+parser.add_argument("-b", "--bed", dest="bed", required=True, help="Input bed file.")
+parser.add_argument("-o", "--out", dest="out", required=True, help="Output file.")
+parser.add_argument("-c", "--c", dest="chr", default=False, action="store_true", help="If specified convert from name to chr number, otherwise convert from chr number to name.")
+parser.add_argument("-d", "--db", dest="db", default="hg38", help="Database to load conversion from.")
+parser.add_argument("-s", "--server", dest="server", default="genome-mysql.cse.ucsc.edu", help="Location of mysql server.")
+parser.add_argument("-u", "--user", dest="user", default="genome", help="Username for db login.")
+parser.add_argument("-p", "--password", dest="password", default="", help="Password for db login.")
+parser.add_argument("-t", "--table", dest="table", default="ucscToINSDC", help="Table to retrieve locus -> number info from.")
diff --git a/doc/source/scripts/chip-job-download-encode b/doc/source/scripts/chip-job-download-encode
new file mode 100644
index 0000000000000000000000000000000000000000..581640c8fea828e823ad4aed8e902b466cb9ee6a
--- /dev/null
+++ b/doc/source/scripts/chip-job-download-encode
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+import chipathlon.utils
+import argparse
+
+parser = argparse.ArgumentParser(description="Download target file.")
+parser.add_argument("-u", "--url", dest="url", required=True, help="Target url.")
+parser.add_argument("-p", "--path", dest="path", required=True, help="Local path to file.")
+parser.add_argument("-t", "--url_type", dest="url_type", default="http://", help="Type of url to access.")
+parser.add_argument("-r", "--retries", dest="retries", default=3, type=int, help="Number of retries.")
+parser.add_argument("-n", "--overwrite", dest="overwrite", default=True, action="store_false", help="Dont' overwrite local file if exists.")
+parser.add_argument("-m", "--md5", dest="md5", help="Check md5 value against passed value.")
diff --git a/doc/source/scripts/chip-job-download-gridfs b/doc/source/scripts/chip-job-download-gridfs
new file mode 100644
index 0000000000000000000000000000000000000000..128293dc797b3adc8ad16d4f2654ea96409e111f
--- /dev/null
+++ b/doc/source/scripts/chip-job-download-gridfs
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import chipathlon.db
+import argparse
+
+parser = argparse.ArgumentParser(description="Download target file from GridFS.")
+parser.add_argument("-H", "--host", dest="host", default="localhost", help="Database host. (default: %(default)s)")
+parser.add_argument("-u", "--username", dest="username", help="Database username (if required).")
+parser.add_argument("-p", "--password", dest="password", help="Database password (if required).")
+parser.add_argument("-i", "--id", dest="gridfs_id", required=True, help="GridFS ID.")
+parser.add_argument("-d", "--destination", dest="destination", required=True, help="Local path to file destination.")
+parser.add_argument("-r", "--retries", dest="retries", default=3, type=int, help="Number of retries.")
+parser.add_argument("-n", "--no-overwrite", dest="overwrite", default=True, action="store_false", help="Don't overwrite local file if it exists.")
+parser.add_argument("-c", "--checkmd5", dest="checkmd5", action="store_true", help="Check md5 value of downloaded file against database value.")
diff --git a/doc/source/scripts/chip-job-music b/doc/source/scripts/chip-job-music
new file mode 100644
index 0000000000000000000000000000000000000000..dd45235a54cd60c5ef443099ea62ceca53334fe4
--- /dev/null
+++ b/doc/source/scripts/chip-job-music
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+import subprocess
+import shutil
+import glob
+
+parser = argparse.ArgumentParser(description="Run music peak caller prep & peak calling.")
+parser.add_argument("--prefix", dest="prefix", required=True, help="Unique prefixectory to create")
+parser.add_argument("--controls", dest="controls", required=True, nargs="+", help="Control files to process.")
+parser.add_argument("--signals", dest="signals", required=True, nargs="+", help="Signal files to process.")
+parser.add_argument("--mapp", dest="mapp", required=True, help="Path to mapability profile")
+
+parser.add_argument("--peak_type", dest="peak_type", required=True, default="narrow", choices=["narrow", "punctate", "broad"], help="Peak calling type to use.")
+parser.add_argument("--lmapp", dest="lmapp", default="50")
diff --git a/doc/source/scripts/chip-job-peakranger-format b/doc/source/scripts/chip-job-peakranger-format
new file mode 100644
index 0000000000000000000000000000000000000000..11a30a73911c5fe0231ec960d6ee5f4901b3280c
--- /dev/null
+++ b/doc/source/scripts/chip-job-peakranger-format
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+import argparse
+import math
+import sys
+import os
+
+# Peakranger output is very different from the expected format:
+# chr1    180330013   180330834   ranger_fdrPassed_0_pval_2.00647e-193_fdr_2.79703e-190   2.79703e-190    +
+# chr1    106321435   106322114   ranger_fdrPassed_1_pval_5.1557e-147_fdr_3.59352e-144    3.59352e-144    +
+# chr1    37474619    37475638    ranger_fdrPassed_2_pval_5.45776e-144_fdr_2.53604e-141   2.53604e-141    +
+# There is no calculated score, the p-value has to be stripped out of the
+# name column, and the strand is at the end instead of in the 6th column.
+
+parser = argparse.ArgumentParser(description = "Format peakranger output files.")
+parser.add_argument("--input", "-i", dest="input", required=True, help="Input result file from peakranger.")
+parser.add_argument("--output", "-o", dest="output", required=True, help="Output file to write formatted results.")
diff --git a/doc/source/scripts/chip-job-save-result b/doc/source/scripts/chip-job-save-result
new file mode 100644
index 0000000000000000000000000000000000000000..29fafbee19dbb4d3fd8e456f31a62966c6d38b02
--- /dev/null
+++ b/doc/source/scripts/chip-job-save-result
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import chipathlon.db
+import argparse
+import yaml
+import os
+
+parser = argparse.ArgumentParser(description="Insert a bed file into the database.")
+parser.add_argument("-u", "--username", dest="username", help="Database username (if required).")
+parser.add_argument("-p", "--password", dest="password", help="Database password (if required).")
+parser.add_argument("-d", "--host", dest="host", default="localhost", help="Database host. (default: %(default)s)")
+parser.add_argument("-f", "--file", dest="file", required=True, help="Path to result file.")
+parser.add_argument("-m", "--meta", dest="meta", required=True, help="Path to meta yaml file.")
diff --git a/doc/source/scripts/chip-meta-download b/doc/source/scripts/chip-meta-download
index e24177c8b65f4ba42f048c2e42a5e42fa54fa443..bdb2cfee1a334d87ddfb6fab77403ae0fa2a1825 100644
--- a/doc/source/scripts/chip-meta-download
+++ b/doc/source/scripts/chip-meta-download
@@ -12,3 +12,4 @@ import datetime
 
 parser = argparse.ArgumentParser(description="Download raw JSON for all experiments.")
 parser.add_argument("-o", "--output-dir", dest="outputdir", default=os.getcwd(), help="Output directory.  (default: %(default)s)")
+parser.add_argument("-q", "--quiet", action='store_true', help="Quiet mode.  Do not print progress information. (default: false)")
diff --git a/doc/source/scripts/chip-meta-import b/doc/source/scripts/chip-meta-import
index 4123d0d51c08659e4195a40a53f02cb9b4cd4cd6..5bbf665b7964496ff0101dd63510853e5a138d4d 100644
--- a/doc/source/scripts/chip-meta-import
+++ b/doc/source/scripts/chip-meta-import
@@ -9,9 +9,10 @@ import os
 import os.path
 
 parser = argparse.ArgumentParser(description="Read per-experiment JSON files and create experiment and samples collections.")
-parser.add_argument("-H", "--host", dest="host", default="hcc-anvil-175-9.unl.edu", help="Database host. (default: %(default)s)")
-parser.add_argument("-u", "--username", dest="username", default="aknecht", required=True, help="Database user. (default: %(default)s)")
-parser.add_argument("-p", "--password", dest="password", required=True, help="Database user password.")
+parser.add_argument("-H", "--host", dest="host", default="localhost", help="Database host. (default: %(default)s)")
+parser.add_argument("-u", "--username", dest="username", help="Database username (if required).")
+parser.add_argument("-p", "--password", dest="password", help="Database password (if required).")
 parser.add_argument("-i", "--input-dir", dest="inputdir", default=os.getcwd(), help="Directory containing per-experiment JSON files.  (default: %(default)s)")
 parser.add_argument("-d", "--drop", dest="drop", default=False, action="store_true", help="Drop data if it exists. (default: %(default)s)")
 parser.add_argument("-s", "--samples", dest="samples", default=False, action="store_true", help="Only recreate the samples collection.")
+parser.add_argument("-q", "--quiet", action='store_true', help="Quiet mode.  Do not print progress information. (default: false)")
diff --git a/doc/source/write_scripts.py b/doc/source/write_scripts.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d203eae00896d1a318edacccf358b17b5bf36ab
--- /dev/null
+++ b/doc/source/write_scripts.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+import os
+import textwrap
+
+"""
+Helper file to bring the auto-generated scripts.rst file up to date!
+The top of the scripts file should just be:
+
+Scripts
+========
+
+With a section for each individual script like this:
+
+chip-gen
+^^^^^^^^
+
+.. argparse::
+    :filename: source/scripts/chip-gen
+    :func: parser
+    :prog: chip-gen
+
+Additionally, we need copies of the scripts from the root-level scripts/
+folder, but we only want their argument-parsing code!  If a script actually
+does any work when loaded, sphinx-argparse can't import it... so we copy each
+script only up to its parse_args() call.  It's gross, but I promise it's worth it.
+"""
+
+# Find the root-level scripts/ folder relative to this file (doc/source/)
+target_path = os.path.abspath(__file__)
+for _ in range(3):
+    target_path = os.path.dirname(target_path)
+target_path = os.path.join(target_path, "scripts")
+
+with open("scripts.rst", "w") as wh:
+    wh.write("Scripts\n========\n\n")
+    for root, dirs, files in os.walk(target_path):
+        for f in sorted(files):
+            # Only copy files that contain an argument parser, and only
+            # up to the point just before their parse_args() call.
+            with open(os.path.join(root, f), "r") as rh:
+                lines = rh.readlines()
+                if any(["ArgumentParser" in line for line in lines]):
+                    with open(os.path.join("scripts", f), "w") as new_script:
+                        for line in lines:
+                            if "parse_args()" in line:
+                                break
+                            new_script.write(line)
+                    wh.write(textwrap.dedent("""
+                        %s
+                        %s
+
+                        .. argparse::
+                            :filename: source/scripts/%s
+                            :func: parser
+                            :prog: %s
+
+                        """ % (f, "^" * len(f), f, f)))
+        break
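
One note on the generated directives: they only render if the Sphinx build has the sphinx-argparse extension enabled. A minimal excerpt, assuming doc/source/conf.py is the Sphinx configuration file for these docs:

    # doc/source/conf.py (excerpt)
    extensions = [
        "sphinxarg.ext",  # provides the ".. argparse::" directive
    ]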
diff --git a/scripts/chip-job-hd-format b/scripts/chip-job-hd-format
new file mode 100644
index 0000000000000000000000000000000000000000..42dcc3586757a311648d52225b9d81a98ae308bd
--- /dev/null
+++ b/scripts/chip-job-hd-format
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# Right now we are just padding missing columns with -1's (tab-delimited
+# so strict BED parsers accept it).  This is not a good way of filling in
+# missing data, but we are not currently sure how to do it correctly.
+awk 'BEGIN { OFS="\t" } { print $0,-1,-1,-1,-1,-1 }' "$1" > "$2"
diff --git a/scripts/chip-job-zerone-format b/scripts/chip-job-zerone-format
new file mode 100644
index 0000000000000000000000000000000000000000..9dd5181c4ea90e71dec2a8606e810d6514a58b96
--- /dev/null
+++ b/scripts/chip-job-zerone-format
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# Sort by chromosome (version order) and coordinates, drop header comments,
+# then pad missing columns with -1's (tab-delimited).  This is not a good
+# way of filling in missing data, but we are not currently sure how to do it correctly.
+/bin/sort -k1,1V -k2,2n -k3,3n "$1" | sed "/^#/ d" | awk 'BEGIN { OFS="\t" } { print $0,-1,-1 }' > "$2"
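
Both format scripts share the same pattern: normalize ordering if needed, then right-pad each record to a fixed column count with -1 sentinels (two extra columns for zerone, five for hiddenDomains). For readers who prefer it spelled out, a Python equivalent of the padding step; the sample record is purely illustrative:

    def pad_columns(line, n_missing):
        """Right-pad a whitespace-delimited record with -1 sentinels."""
        fields = line.rstrip("\n").split()
        return "\t".join(fields + ["-1"] * n_missing)

    # e.g. a zerone-style record gains two trailing -1 columns:
    print(pad_columns("chr1\t100\t200\t0.98", 2))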
diff --git a/setup.py b/setup.py
index de5977557b4394d6d961e6bb3333cd9a3589072c..56788b71cabf2680ed96f82cd4691e02e46b6ef0 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,9 @@ setup(
         "scripts/chip-job-peakranger-format",
         "scripts/chip-job-save-result",
         "scripts/chip-job-sort-peak",
-        "scripts/chip-job-zcat-peak"
+        "scripts/chip-job-zcat-peak",
+        "scripts/chip-job-zerone-format",
+        "scripts/chip-job-hd-format"
     ],
     install_requires=["pymongo","pyyaml"],
     zip_safe=False