diff --git a/xfel_calibrate/calibrate.py b/xfel_calibrate/calibrate.py
index 0edd6f6ba773ccff1088f802612a89ae3c1d2066..d32a857ecbdc0d0a3ee856c274c7d38bebf880cc 100755
--- a/xfel_calibrate/calibrate.py
+++ b/xfel_calibrate/calibrate.py
@@ -472,7 +472,18 @@ def get_par_attr(parms, key, attr, default=None):
 
 
 def flatten_list(l):
-    return "_".join([str(flatten_list(v)) for v in l]) if isinstance(l, list) else l
+    """
+    Make a short string representation of a list
+
+    :param l: A list or a single value
+    :return: "first-last" for a list with more than one entry, the single
+             entry for a one-element list, or str(l) for a non-list value
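+
+    >>> flatten_list([1, 2, 3])
+    '1-3'
+    >>> flatten_list([7])
+    '7'
+    >>> flatten_list("all")
+    'all'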
+    """
+    if isinstance(l, list):
+        if len(l) > 1:
+            return '{}-{}'.format(l[0], l[-1])
+        else:
+            return '{}'.format(l[0])
+    else:
+        return str(l)
 
 
 def set_figure_format(nb, enable_vector_format):
@@ -568,10 +579,26 @@ def get_launcher_command(args, temp_path, dependent, job_list):
     return launcher_slurm.split()
 
 
+def remove_duplications(l):
+    """
+    Remove duplicate elements from a list, preserving their order
+
+    :param l: Input list
+    :return: Output list of unique elements
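+
+    >>> remove_duplications([1, 2, 1, 3])
+    Duplicated concurrency parameters were removed
+    [1, 2, 3]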
+    """
+    unique_l = []
+    for elem in l:
+        if elem not in unique_l:
+            unique_l.append(elem)
+    if unique_l != l:
+        print("Duplicated concurrency parameters were removed")
+    return unique_l
+
+
 def concurrent_run(temp_path, nb, nbname, args, cparm=None, cval=None,
                    final_job=False, job_list=[], fmt_args={}, cluster_cores=8,
                    sequential=False, dependent=False,
-                   show_title=True):
+                   show_title=True, cluster_profile='NO_CLUSTER'):
     """ Launch a concurrent job on the cluster via SLURM
     """
 
@@ -580,8 +607,7 @@ def concurrent_run(temp_path, nb, nbname, args, cparm=None, cval=None,
 
     suffix = flatten_list(cval)
     if "cluster_profile" in args:
-        args["cluster_profile"] = "{}_{}".format(
-            args["cluster_profile"], suffix)
+        args["cluster_profile"] = "{}_{}".format(cluster_profile, suffix)
 
     # first convert the notebook
     parms = extract_parameters(nb)
@@ -772,11 +798,12 @@ def run():
                 concurrency["parameter"])
             warnings.warn(msg, RuntimeWarning)
 
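+        # base profile name handed to concurrent_run; stays "NO_CLUSTER"
+        # unless a per-run profile is generated below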
+        cluster_profile = "NO_CLUSTER"
         if not has_parm(parms, "cluster_profile"):
             warnings.warn("Notebook has no cluster_profile parameter, " +
                           "running on cluster will likely fail!", RuntimeWarning)
         elif "cluster_profile" not in args or args["cluster_profile"] == parser.get_default('cluster_profile'):
-            args["cluster_profile"] = "slurm_prof_{}".format(run_uuid)
+            cluster_profile = "slurm_prof_{}".format(run_uuid)
 
         # create a temporary output directory to work in
         run_tmp_path = "{}/slurm_tmp_{}".format(temp_path, run_uuid)
@@ -822,7 +849,8 @@ def run():
                                    final_job=True, job_list=joblist,
                                    fmt_args=fmt_args,
                                    cluster_cores=cluster_cores,
-                                   sequential=sequential)
+                                   sequential=sequential,
+                                   cluster_profile=cluster_profile)
 
             joblist.append(jobid)
         else:
@@ -873,6 +901,7 @@ def run():
 
             # get expected type
             cvtype = get_par_attr(parms, cvar, 'type', list)
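+            # skip duplicated concurrency values to avoid launching identical jobs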
+            cvals = remove_duplications(cvals)
 
             for cnum, cval in enumerate(cvals):
                 show_title = cnum == 0
@@ -885,7 +914,8 @@ def run():
                                        joblist, fmt_args,
                                        cluster_cores=cluster_cores,
                                        sequential=sequential,
-                                       show_title=show_title)
+                                       show_title=show_title,
+                                       cluster_profile=cluster_profile)
                 joblist.append(jobid)
 
         # Run dependent notebooks
@@ -902,7 +932,8 @@ def run():
                                        job_list=joblist, fmt_args=fmt_args,
                                        cluster_cores=cluster_cores,
                                        sequential=sequential,
-                                       dependent=True)
+                                       dependent=True,
+                                       cluster_profile=cluster_profile)
                 joblist.append(jobid)
 
         if not all([j is None for j in joblist]):