
Table 11 Results summary of sensitivity analysis w.r.t. the number of workers at \(\alpha = 0.1\)

From: Genetic algorithm based hybrid approach to solve fuzzy multi-objective assignment problem using exponential membership function

| No. of workers | Case | \(\lambda\) | \(\mu_{ij}\) | Objective values | Optimum allocations \(x_{ij}\) |
|---|---|---|---|---|---|
| Workers-7 | 1 | 0.7480 | (0.8986, 0.9066, 0.8964); (0.9182, 0.9036, 0.8840); (0.8606, 0.8387, 0.7480) | (105, 132, 168.9); (68, 95, 131.9); (36.7, 61, 87.1) | \(x_{13}, x_{112}, x_{115}, x_{24}, x_{210}, x_{31}, x_{36}, x_{38}, x_{411}, x_{413}, x_{55}, x_{59}, x_{62}, x_{67}, x_{614}\) |
| | 2 | 0.8067 | (0.9198, 0.9280, 0.9414); (0.8768, 0.8616, 0.8491); (0.9052, 0.8875, 0.8067) | (100.9, 127, 156.7); (76.1, 104, 140); (29.6, 53, 79.1) | \(x_{11}, x_{14}, x_{111}, x_{23}, x_{210}, x_{215}, x_{38}, x_{312}, x_{45}, x_{52}, x_{56}, x_{513}, x_{69}, x_{614}, x_{77}\) |
| | 3 | 0.7303 | (0.7716, 0.7433, 0.7303); (0.8944, 0.8933, 0.8805); (0.9648, 0.9577, 0.9162) | (85.2, 114, 148.2); (84.9, 111, 148.8); (40.7, 65, 91.1) | \(x_{12}, x_{18}, x_{112}, x_{23}, x_{24}, x_{36}, x_{37}, x_{314}, x_{49}, x_{411}, x_{413}, x_{52}, x_{610}, x_{615}, x_{75}\) |
| | 4 | 0.8131 | (0.8131, 0.8202, 0.8304); (0.8362, 0.8263, 0.8287); (0.9597, 0.9523, 0.9070) | (80.6, 104, 133.7); (98.3, 128, 163.1); (42.7, 67, 93.1) | \(x_{13}, x_{19}, x_{111}, x_{21}, x_{213}, x_{32}, x_{35}, x_{314}, x_{44}, x_{46}, x_{47}, x_{58}, x_{512}, x_{515}, x_{610}\) |
| | 5 | 0.7539 | (0.8179, 0.8122, 0.8251); (0.9353, 0.9320, 0.9332); (0.8937, 0.8652, 0.7539) | (90, 117, 147.6); (113.4, 144, 180.9); (23.6, 47, 74) | \(x_{19}, x_{111}, x_{215}, x_{33}, x_{34}, x_{314}, x_{45}, x_{48}, x_{51}, x_{512}, x_{513}, x_{62}, x_{610}, x_{76}, x_{77}\) |
| Workers-8 | 1 | 0.7932 | (0.9092, 0.9083, 0.9110); (0.8911, 0.8836, 0.8578); (0.8482, 0.8187, 0.7932) | (108.2, 137, 170.3); (71.8, 97, 134.8); (37.8, 63, 90) | \(x_{111}, x_{114}, x_{24}, x_{215}, x_{38}, x_{46}, x_{47}, x_{51}, x_{63}, x_{69}, x_{710}, x_{712}, x_{713}, x_{82}, x_{85}\) |
| | 2 | 0.7752 | (0.9492, 0.9511, 0.9385); (0.8113, 0.7849, 0.7752); (0.9389, 0.9266, 0.9225) | (97.9, 124, 161.8); (87.4, 118, 154); (21.4, 43, 69.1) | \(x_{11}, x_{13}, x_{112}, x_{210}, x_{34}, x_{314}, x_{46}, x_{47}, x_{411}, x_{58}, x_{513}, x_{62}, x_{615}, x_{85}, x_{89}\) |
| | 3 | 0.7408 | (0.7504, 0.7408, 0.7626); (0.8687, 0.8702, 0.8712); (0.9754, 0.9685, 0.9638) | (91, 118, 146.8); (90.8, 116, 149.3); (34.7, 59, 85.1) | \(x_{111}, x_{112}, x_{21}, x_{23}, x_{215}, x_{314}, x_{45}, x_{48}, x_{49}, x_{52}, x_{59}, x_{610}, x_{84}, x_{87}, x_{813}\) |
| | 4 | 0.8069 | (0.8112, 0.8181, 0.8096); (0.8719, 0.8666, 0.8621); (0.9698, 0.9604, 0.9537) | (83.6, 107, 139.4); (90, 117, 152.1); (37.8, 63, 89.1) | \(x_{14}, x_{111}, x_{115}, x_{29}, x_{38}, x_{314}, x_{46}, x_{412}, x_{55}, x_{513}, x_{62}, x_{610}, x_{77}, x_{83}\) |
| | 5 | 0.8168 | (0.8434, 0.8352, 0.8423); (0.9572, 0.9560, 0.9540); (0.8645, 0.8347, 0.8168) | (90, 117, 147.6); (104.2, 132, 168.9); (25.6, 49, 76) | \(x_{14}, x_{111}, x_{215}, x_{314}, x_{46}, x_{48}, x_{53}, x_{55}, x_{59}, x_{61}, x_{610}, x_{612}, x_{77}, x_{82}, x_{813}\) |
| Workers-9 | 1 | 0.7975 | (0.9192, 0.8991, 0.8994); (0.9197, 0.9064, 0.8898); (0.8401, 0.8158, 0.7975) | (108.8, 143, 177.2); (65.7, 90, 125.1); (37.6, 61, 87.1) | \(x_{16}, x_{111}, x_{113}, x_{28}, x_{29}, x_{215}, x_{31}, x_{37}, x_{44}, x_{52}, x_{73}, x_{85}, x_{812}, x_{910}, x_{914}\) |
| | 2 | 0.8208 | (0.9371, 0.9380, 0.9279); (0.8636, 0.8265, 0.8208); (0.9149, 0.8936, 0.8810) | (104.1, 132, 168.9); (77.4, 108, 142.2); (23.6, 47, 74) | \(x_{113}, x_{21}, x_{210}, x_{312}, x_{44}, x_{48}, x_{411}, x_{55}, x_{62}, x_{67}, x_{714}, x_{89}, x_{93}, x_{96}, x_{915}\) |
| | 3 | 0.7298 | (0.7346, 0.7429, 0.7298); (0.9093, 0.8960, 0.8809); (0.9524, 0.9381, 0.9258) | (94.8, 120, 154.2); (80, 107, 144.8); (43.8, 69, 95.1) | \(x_{14}, x_{112}, x_{115}, x_{23}, x_{29}, x_{211}, x_{38}, x_{314}, x_{46}, x_{51}, x_{62}, x_{75}, x_{713}, x_{97}, x_{910}\) |
| | 4 | 0.7653 | (0.8351, 0.8271, 0.8083); (0.8056, 0.7922, 0.7653); (0.9626, 0.9497, 0.9435) | (81.8, 107, 141.2); (105.2, 134, 176.3); (39.8, 65, 90.2) | \(x_{17}, x_{18}, x_{111}, x_{23}, x_{26}, x_{34}, x_{314}, x_{45}, x_{49}, x_{413}, x_{52}, x_{512}, x_{61}, x_{910}, x_{915}\) |
| | 5 | 0.7615 | (0.8495, 0.8517, 0.8607); (0.9718, 0.9657, 0.9585); (0.8275, 0.7847, 0.7615) | (90.8, 111.6, 145.7); (94.3, 124, 164.5); (28.7, 53, 80) | \(x_{111}, x_{38}, x_{314}, x_{44}, x_{46}, x_{413}, x_{55}, x_{61}, x_{610}, x_{77}, x_{89}, x_{812}, x_{815}, x_{92}, x_{93}\) |