@@ -99,7 +99,7 @@ discard block |
99 | 99 | } |
100 | 100 | foreach ($data as $row) { |
101 | 101 | $string = implode("</td><td>", $row); |
102 | -            $string = '<tr><td>' . $string . '</td></tr>'; |
102 | +            $string = '<tr><td>'.$string.'</td></tr>'; |
103 | 103 | if ($encoding != 'utf-8') { |
104 | 104 | $string = api_convert_encoding($string, $encoding, $systemEncoding); |
105 | 105 | } |
@@ -173,7 +173,7 @@ discard block |
173 | 173 | fwrite($handle, '<'.$wrapper_tagname.'>'); |
174 | 174 | } |
175 | 175 | $s = self::_export_complex_table_xml_helper($data); |
176 | -        fwrite($handle,$s); |
176 | +        fwrite($handle, $s); |
177 | 177 | if (!is_null($wrapper_tagname)) { |
178 | 178 | fwrite($handle, '</'.$wrapper_tagname.'>'."\n"); |
179 | 179 | } |
@@ -195,10 +195,10 @@ discard block |
195 | 195 | } |
196 | 196 | $string = ''; |
197 | 197 | foreach ($data as $row) { |
198 | -            $string .= "\n".str_repeat("\t",$level).'<'.$row['name'].'>'; |
198 | +            $string .= "\n".str_repeat("\t", $level).'<'.$row['name'].'>'; |
199 | 199 | if (is_array($row['value'])) { |
200 | -                $string .= self::_export_complex_table_xml_helper($row['value'],$level+1)."\n"; |
201 | -                $string .= str_repeat("\t",$level).'</'.$row['name'].'>'; |
200 | +                $string .= self::_export_complex_table_xml_helper($row['value'], $level + 1)."\n"; |
201 | +                $string .= str_repeat("\t", $level).'</'.$row['name'].'>'; |
202 | 202 | } else { |
203 | 203 | $string .= $row['value']; |
204 | 204 | $string .= '</'.$row['name'].'>'; |
@@ -110,10 +110,10 @@ discard block |
110 | 110 | { |
111 | 111 | $old_locale = setlocale(LC_ALL, null); |
112 | 112 | $code = api_get_language_isocode(); |
113 | -        $locale_list = array($code.'.utf8', 'en.utf8','en_US.utf8','en_GB.utf8'); |
113 | +        $locale_list = array($code.'.utf8', 'en.utf8', 'en_US.utf8', 'en_GB.utf8'); |
114 | 114 | $try_sort = false; |
115 | 115 | |
116 | -        foreach($locale_list as $locale) { |
116 | +        foreach ($locale_list as $locale) { |
117 | 117 | $my_local = setlocale(LC_COLLATE, $locale); |
118 | 118 | if ($my_local) { |
119 | 119 | $try_sort = true; |
@@ -151,7 +151,7 @@ discard block |
151 | 151 | $flatten = array(); |
152 | 152 | array_walk_recursive( |
153 | 153 | $array, |
154 | -            function ($value) use (&$flatten) { |
154 | +            function($value) use (&$flatten) { |
155 | 155 | $flatten[] = $value; |
156 | 156 | } |
157 | 157 | ); |
@@ -176,7 +176,7 @@ discard block |
176 | 176 | |
177 | 177 | $sql = "SELECT MAX(description_type) as MAX |
178 | 178 | FROM $tbl_course_description |
179 | -                WHERE c_id = $course_id AND session_id='" . $this->session_id . "'"; |
179 | +                WHERE c_id = $course_id AND session_id='".$this->session_id."'"; |
180 | 180 | $rs = Database::query($sql); |
181 | 181 | $max = Database::fetch_array($rs); |
182 | 182 | $description_type = $max['MAX'] + 1; |
@@ -211,7 +211,7 @@ discard block |
211 | 211 | 'session_id' => $this->session_id |
212 | 212 | ]; |
213 | 213 | |
214 | -        $last_id = Database::insert($table, $params); |
214 | +        $last_id = Database::insert($table, $params); |
215 | 215 | |
216 | 216 | if ($last_id > 0) { |
217 | 217 | $sql = "UPDATE $table SET id = iid WHERE iid = $last_id"; |
@@ -319,8 +319,8 @@ discard block |
319 | 319 | $sql = "DELETE FROM $tbl_course_description |
320 | 320 | WHERE |
321 | 321 | c_id = $course_id AND |
322 | -                    id = '" . intval($this->id) . "' AND |
323 | -                    session_id = '" . intval($this->session_id) . "'"; |
322 | +                    id = '".intval($this->id)."' AND |
323 | +                    session_id = '".intval($this->session_id)."'"; |
324 | 324 | $result = Database::query($sql); |
325 | 325 | $affected_rows = Database::affected_rows($result); |
326 | 326 | if ($this->id > 0) { |
@@ -349,7 +349,7 @@ discard block |
349 | 349 | $course_id = api_get_course_int_id(); |
350 | 350 | |
351 | 351 | $sql = "SELECT id FROM $tbl_course_description |
352 | -                WHERE c_id = $course_id AND description_type = '" . intval($description_type) . "'"; |
352 | +                WHERE c_id = $course_id AND description_type = '".intval($description_type)."'"; |
353 | 353 | $rs = Database::query($sql); |
354 | 354 | $row = Database::fetch_array($rs); |
355 | 355 | $description_id = $row['id']; |
@@ -6,7 +6,7 @@ discard block |
6 | 6 | */ |
7 | 7 | |
8 | 8 | require_once 'xapian.php'; |
9 | -require_once dirname(__FILE__) . '/../IndexableChunk.class.php'; |
9 | +require_once dirname(__FILE__).'/../IndexableChunk.class.php'; |
10 | 10 | |
11 | 11 | /** |
12 | 12 | * Abstract helper class |
@@ -130,7 +130,7 @@ discard block |
130 | 130 | if (!empty($chunk->terms)) { |
131 | 131 | foreach ($chunk->terms as $term) { |
132 | 132 | /* FIXME: think of getting weight */ |
133 | -                $doc->add_term($term['flag'] . $term['name'], 1); |
133 | +                $doc->add_term($term['flag'].$term['name'], 1); |
134 | 134 | } |
135 | 135 | } |
136 | 136 | |
@@ -215,7 +215,7 @@ discard block |
215 | 215 | $doc->clear_terms(); |
216 | 216 | foreach ($terms as $term) { |
217 | 217 | //add directly |
218 | -            $doc->add_term($prefix . $term, 1); |
218 | +            $doc->add_term($prefix.$term, 1); |
219 | 219 | } |
220 | 220 | $this->db->replace_document($did, $doc); |
221 | 221 | $this->db->flush(); |
@@ -8,11 +8,11 @@ discard block |
8 | 8 | * Code |
9 | 9 | */ |
10 | 10 | require_once 'xapian.php'; |
11 | -require_once dirname(__FILE__) . '/../IndexableChunk.class.php'; |
11 | +require_once dirname(__FILE__).'/../IndexableChunk.class.php'; |
12 | 12 | //TODO: think another way without including specific fields here |
13 | -require_once api_get_path(LIBRARY_PATH) . 'specific_fields_manager.lib.php'; |
13 | +require_once api_get_path(LIBRARY_PATH).'specific_fields_manager.lib.php'; |
14 | 14 | |
15 | -define('XAPIAN_DB', api_get_path(SYS_UPLOAD_PATH) . 'plugins/xapian/searchdb/'); |
15 | +define('XAPIAN_DB', api_get_path(SYS_UPLOAD_PATH).'plugins/xapian/searchdb/'); |
16 | 16 | |
17 | 17 | /** |
18 | 18 | * Queries the database. |
@@ -95,7 +95,7 @@ discard block |
95 | 95 | |
96 | 96 | // process each specific field prefix |
97 | 97 | foreach ($specific_fields as $specific_field) { |
98 | -            $results[$count]['sf-' . $specific_field['code']] = xapian_get_doc_terms($document, $specific_field['code']); |
98 | +            $results[$count]['sf-'.$specific_field['code']] = xapian_get_doc_terms($document, $specific_field['code']); |
99 | 99 | } |
100 | 100 | |
101 | 101 | // rest of data |
@@ -261,6 +261,6 @@ discard block |
261 | 261 | } else { |
262 | 262 | $message_error = get_lang('SearchOtherXapianError'); |
263 | 263 | } |
264 | -        $display_message = get_lang('Error') . ' : ' . $message_error; |
264 | +        $display_message = get_lang('Error').' : '.$message_error; |
265 | 265 | Display::display_error_message($display_message); |
266 | 266 | } |
@@ -7,7 +7,7 @@ discard block |
7 | 7 | /** |
8 | 8 | * code |
9 | 9 | */ |
10 | -require_once dirname(__FILE__) . '/../../global.inc.php'; |
10 | +require_once dirname(__FILE__).'/../../global.inc.php'; |
11 | 11 | include_once 'xapian/XapianIndexer.class.php'; |
12 | 12 | |
13 | 13 | /** |
@@ -39,7 +39,7 @@ discard block |
39 | 39 | if ((count(array_diff($terms, $stored_terms)) == 0) && (count(array_diff($stored_terms, $terms)) == 0)) |
40 | 40 | return FALSE; |
41 | 41 | |
42 | -        require_once api_get_path(LIBRARY_PATH) . 'search/xapian/XapianQuery.php'; |
42 | +        require_once api_get_path(LIBRARY_PATH).'search/xapian/XapianQuery.php'; |
43 | 43 | |
44 | 44 | // compare terms |
45 | 45 | $doc = $this->get_document($search_did); |
@@ -55,10 +55,10 @@ discard block |
55 | 55 | |
56 | 56 | // save it to search engine |
57 | 57 | foreach ($missing_terms as $term) { |
58 | -            $this->add_term_to_doc($prefix . $term, $doc); |
58 | +            $this->add_term_to_doc($prefix.$term, $doc); |
59 | 59 | } |
60 | 60 | foreach ($deprecated_terms as $term) { |
61 | -            $this->remove_term_from_doc($prefix . $term, $doc); |
61 | +            $this->remove_term_from_doc($prefix.$term, $doc); |
62 | 62 | } |
63 | 63 | |
64 | 64 | // don't do anything if no change |
@@ -74,7 +74,7 @@ discard block |
74 | 74 | * @return array Array of terms |
75 | 75 | */ |
76 | 76 | function get_terms_on_db($prefix, $course_code, $tool_id, $ref_id) { |
77 | -        require_once api_get_path(LIBRARY_PATH) . 'specific_fields_manager.lib.php'; |
77 | +        require_once api_get_path(LIBRARY_PATH).'specific_fields_manager.lib.php'; |
78 | 78 | $terms = get_specific_field_values_list_by_prefix($prefix, $course_code, $tool_id, $ref_id); |
79 | 79 | $prefix_terms = array(); |
80 | 80 | foreach ($terms as $term) { |
@@ -18,12 +18,12 @@ discard block |
18 | 18 | * @param array extra Extra queries to join with. Optional |
19 | 19 | * @return array |
20 | 20 | */ |
21 | -function chamilo_query_query($query_string, $offset=0, $length=10, $extra=NULL) { |
21 | +function chamilo_query_query($query_string, $offset = 0, $length = 10, $extra = NULL) { |
22 | 22 | list($count, $results) = xapian_query($query_string, NULL, $offset, $length, $extra); |
23 | 23 | return chamilo_preprocess_results($results); |
24 | 24 | } |
25 | 25 | |
26 | -function chamilo_query_simple_query($query_string, $offset=0, $length=10, $extra=NULL) { |
26 | +function chamilo_query_simple_query($query_string, $offset = 0, $length = 10, $extra = NULL) { |
27 | 27 | return xapian_query($query_string, NULL, $offset, $length, $extra); |
28 | 28 | } |
29 | 29 | |
@@ -42,7 +42,7 @@ discard block |
42 | 42 | function chamilo_preprocess_results($results) { |
43 | 43 | // group by toolid |
44 | 44 | $results_by_tool = array(); |
45 | -    if (count($results)>0) { |
45 | +    if (count($results) > 0) { |
46 | 46 | |
47 | 47 | foreach ($results as $key => $row) { |
48 | 48 | $results_by_tool[$row['toolid']][] = $row; |
@@ -50,8 +50,8 @@ discard block |
50 | 50 | |
51 | 51 | $processed_results = array(); |
52 | 52 | foreach ($results_by_tool as $toolid => $rows) { |
53 | -            $tool_processor_class = $toolid .'_processor'; |
54 | -            $tool_processor_path = api_get_path(LIBRARY_PATH) .'search/tool_processors/'. $tool_processor_class .'.class.php'; |
53 | +            $tool_processor_class = $toolid.'_processor'; |
54 | +            $tool_processor_path = api_get_path(LIBRARY_PATH).'search/tool_processors/'.$tool_processor_class.'.class.php'; |
55 | 55 | if (file_exists($tool_processor_path)) { |
56 | 56 | require_once($tool_processor_path); |
57 | 57 | $tool_processor = new $tool_processor_class($rows); |
@@ -71,6 +71,6 @@ discard block |
71 | 71 | * @param string $op |
72 | 72 | * @return XapianQuery query joined |
73 | 73 | */ |
74 | -function chamilo_join_queries($query1, $query2=NULL, $op='or') { |
74 | +function chamilo_join_queries($query1, $query2 = NULL, $op = 'or') { |
75 | 75 | return xapian_join_queries($query1, $query2, $op); |
76 | 76 | } |
@@ -17,8 +17,8 @@ discard block |
17 | 17 | return; |
18 | 18 | } |
19 | 19 | |
20 | -require_once dirname(__FILE__) . '../../../global.inc.php'; |
21 | -require_once api_get_path(LIBRARY_PATH) . 'search/ChamiloQuery.php'; |
20 | +require_once dirname(__FILE__).'/../../../global.inc.php'; |
21 | +require_once api_get_path(LIBRARY_PATH).'search/ChamiloQuery.php'; |
22 | 22 | |
23 | 23 | /** |
24 | 24 | * search with filter and build base array avoding repeated terms |
@@ -33,7 +33,7 @@ discard block |
33 | 33 | if (is_array($dkterms) && is_array($dkterms[1])) { |
34 | 34 | foreach ($specific_fields as $specific_field) { |
35 | 35 | foreach ($dkterms[1] as $obj) { |
36 | -            foreach ($obj['sf-' . $specific_field['code']] as $raw_term) { |
36 | +            foreach ($obj['sf-'.$specific_field['code']] as $raw_term) { |
37 | 37 | if (count($raw_term['name']) > 1) { |
38 | 38 | $normal_term = substr($raw_term['name'], 1); |
39 | 39 | $sf_terms[$specific_field['code']][$normal_term] = $normal_term; |
@@ -54,10 +54,10 @@ discard block |
54 | 54 | |
55 | 55 | if (($cid = api_get_course_id()) != -1) { // with cid |
56 | 56 | // course filter |
57 | -        $filter[] = chamilo_get_boolean_query(XAPIAN_PREFIX_COURSEID . $cid); |
57 | +        $filter[] = chamilo_get_boolean_query(XAPIAN_PREFIX_COURSEID.$cid); |
58 | 58 | // term filter |
59 | 59 | if ($term != '__all__') { |
60 | -            $filter[] = chamilo_get_boolean_query($prefix . $term); |
60 | +            $filter[] = chamilo_get_boolean_query($prefix.$term); |
61 | 61 | // always and between term and courseid |
62 | 62 | $filter = chamilo_join_queries($filter, null, 'and'); |
63 | 63 | } |
@@ -65,7 +65,7 @@ discard block |
65 | 65 | $sf_terms = get_usual_sf_terms($filter, $specific_fields); |
66 | 66 | } else { // without cid |
67 | 67 | if ($term != '__all__') { |
68 | -            $filter[] = chamilo_get_boolean_query($prefix . $term); |
68 | +            $filter[] = chamilo_get_boolean_query($prefix.$term); |
69 | 69 | |
70 | 70 | $sf_terms = get_usual_sf_terms($filter, $specific_fields); |
71 | 71 | } else { // no cid and all/any terms |
@@ -51,7 +51,7 @@ discard block |
51 | 51 | foreach (array_keys($this->_chunks[$type]) as $typekey) { |
52 | 52 | list($key, $data) = explode("\0", $this->_chunks[$type][$typekey]); |
53 | 53 | if (strcmp($key, $check) == 0) { |
54 | -                echo 'Key "' . $check . '" already exists in "' . $type . '" chunk.'; |
54 | +                echo 'Key "'.$check.'" already exists in "'.$type.'" chunk.'; |
55 | 55 | return false; |
56 | 56 | } |
57 | 57 | } |
@@ -70,11 +70,11 @@ discard block |
70 | 70 | */ |
71 | 71 | public function addChunk($chunkType, $key, $value) { |
72 | 72 | |
73 | -        $chunkData = $key . "\0" . $value; |
74 | -        $crc = pack("N", crc32($chunkType . $chunkData)); |
73 | +        $chunkData = $key."\0".$value; |
74 | +        $crc = pack("N", crc32($chunkType.$chunkData)); |
75 | 75 | $len = pack("N", strlen($chunkData)); |
76 | 76 | |
77 | -        $newChunk = $len . $chunkType . $chunkData . $crc; |
77 | +        $newChunk = $len.$chunkType.$chunkData.$crc; |
78 | 78 | $result = substr($this->_contents, 0, $this->_size - 12) |
79 | 79 | . $newChunk |
80 | 80 | . substr($this->_contents, $this->_size - 12, 12); |
@@ -92,30 +92,30 @@ discard block |
92 | 92 | */ |
93 | 93 | public function removeChunks($chunkType, $key, $png) { |
94 | 94 | // Read the magic bytes and verify |
95 | -        $retval = substr($png,0,8); |
95 | +        $retval = substr($png, 0, 8); |
96 | 96 | $ipos = 8; |
97 | 97 | if ($retval != "\x89PNG\x0d\x0a\x1a\x0a") |
98 | 98 | throw new Exception('Is not a valid PNG image'); |
99 | 99 | // Loop through the chunks. Byte 0-3 is length, Byte 4-7 is type |
100 | -        $chunkHeader = substr($png,$ipos,8); |
100 | +        $chunkHeader = substr($png, $ipos, 8); |
101 | 101 | $ipos = $ipos + 8; |
102 | 102 | while ($chunkHeader) { |
103 | 103 | // Extract length and type from binary data |
104 | 104 | $chunk = @unpack('Nsize/a4type', $chunkHeader); |
105 | 105 | $skip = false; |
106 | -            if ( $chunk['type'] == $chunkType ) { |
107 | -                $data = substr($png,$ipos,$chunk['size']); |
106 | +            if ($chunk['type'] == $chunkType) { |
107 | +                $data = substr($png, $ipos, $chunk['size']); |
108 | 108 | $sections = explode("\0", $data); |
109 | 109 | print_r($sections); |
110 | -                if ( $sections[0] == $key ) $skip = true; |
110 | +                if ($sections[0] == $key) $skip = true; |
111 | 111 | } |
112 | 112 | // Extract the data and the CRC |
113 | -            $data = substr($png,$ipos,$chunk['size']+4); |
113 | +            $data = substr($png, $ipos, $chunk['size'] + 4); |
114 | 114 | $ipos = $ipos + $chunk['size'] + 4; |
115 | 115 | // Add in the header, data, and CRC |
116 | -            if ( ! $skip ) $retval = $retval . $chunkHeader . $data; |
116 | +            if (!$skip) $retval = $retval.$chunkHeader.$data; |
117 | 117 | // Read next chunk header |
118 | -            $chunkHeader = substr($png,$ipos,8); |
118 | +            $chunkHeader = substr($png, $ipos, 8); |
119 | 119 | $ipos = $ipos + 8; |
120 | 120 | } |
121 | 121 | return $retval; |
@@ -131,34 +131,34 @@ discard block |
131 | 131 | * If there is PNG information that matches the key an array is returned |
132 | 132 | * |
133 | 133 | */ |
134 | -    public function extractBadgeInfo($png, $key='openbadges') { |
134 | +    public function extractBadgeInfo($png, $key = 'openbadges') { |
135 | 135 | // Read the magic bytes and verify |
136 | -        $retval = substr($png,0,8); |
136 | +        $retval = substr($png, 0, 8); |
137 | 137 | $ipos = 8; |
138 | 138 | if ($retval != "\x89PNG\x0d\x0a\x1a\x0a") { |
139 | 139 | return false; |
140 | 140 | } |
141 | 141 | |
142 | 142 | // Loop through the chunks. Byte 0-3 is length, Byte 4-7 is type |
143 | -        $chunkHeader = substr($png,$ipos,8); |
143 | +        $chunkHeader = substr($png, $ipos, 8); |
144 | 144 | $ipos = $ipos + 8; |
145 | 145 | while ($chunkHeader) { |
146 | 146 | // Extract length and type from binary data |
147 | 147 | $chunk = @unpack('Nsize/a4type', $chunkHeader); |
148 | 148 | $skip = false; |
149 | 149 | if ($chunk['type'] == 'tEXt') { |
150 | -                $data = substr($png,$ipos,$chunk['size']); |
150 | +                $data = substr($png, $ipos, $chunk['size']); |
151 | 151 | $sections = explode("\0", $data); |
152 | 152 | if ($sections[0] == $key) { |
153 | 153 | return $sections; |
154 | 154 | } |
155 | 155 | } |
156 | 156 | // Extract the data and the CRC |
157 | -            $data = substr($png,$ipos,$chunk['size']+4); |
157 | +            $data = substr($png, $ipos, $chunk['size'] + 4); |
158 | 158 | $ipos = $ipos + $chunk['size'] + 4; |
159 | 159 | |
160 | 160 | // Read next chunk header |
161 | -            $chunkHeader = substr($png,$ipos,8); |
161 | +            $chunkHeader = substr($png, $ipos, 8); |
162 | 162 | $ipos = $ipos + 8; |
163 | 163 | } |
164 | 164 | } |