package com.astrolabsoftware.FinkBrowser.HBaser;

import com.Lomikel.HBaser.HBaseClient;
import com.Lomikel.HBaser.HBaseSQLClient;
import com.Lomikel.Utils.DateTimeManagement;
import com.Lomikel.Utils.Pair;
import com.Lomikel.Utils.LomikelException;

// HealPix
import cds.healpix.Healpix;
import cds.healpix.HealpixNested;
import cds.healpix.HealpixNestedFixedRadiusConeComputer;
import cds.healpix.HealpixNestedBMOC;
import cds.healpix.FlatHashIterator;
import static cds.healpix.VerticesAndPathComputer.LON_INDEX;
import static cds.healpix.VerticesAndPathComputer.LAT_INDEX;

// HBase
import org.apache.hadoop.hbase.TableExistsException;

// Java
import java.lang.Math;
import java.util.Map;
import java.util.TreeMap;
import java.util.Set;
import java.util.TreeSet;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.stream.Collectors;
import java.io.IOException;

// Log4J
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;

/** <code>FinkHBaseClient</code> handles the connection to the HBase table
  * with specific Fink functionality.
  * It expects the main table with a schema and two schemaless aux tables:
  * <ul>
  * <li><b>*.jd</b> table with <code>key = jd_objectId</code> and
  *     columns <code>i:jd,i:objectId</code>.</li>
  * <li><b>*.pixel</b> table with <code>key = pixel_objectId</code> and
  *     columns <code>i:objectId,i:dec,i:ra</code>.</li>
  * </ul>
  * @opt attributes
  * @opt operations
  * @opt types
  * @opt visibility
  * @author <a href="mailto:Julius.Hrivnac@cern.ch">J.Hrivnac</a> */
public class FinkHBaseClient extends HBaseSQLClient {

  /** Create.
    * @param zookeepers The comma-separated list of zookeeper ids.
    * @param clientPort The client port.
    * @throws LomikelException If anything goes wrong. */
  public FinkHBaseClient(String zookeepers,
                         String clientPort) throws LomikelException {
    super(zookeepers, clientPort);
    //setFinkEvaluatorFunctions();
  }

  /** Create.
    * @param zookeepers The comma-separated list of zookeeper ids.
    * @param clientPort The client port.
    * @throws LomikelException If anything goes wrong. */
  public FinkHBaseClient(String zookeepers,
                         int    clientPort) throws LomikelException {
    super(zookeepers, clientPort);
    //setFinkEvaluatorFunctions();
  }

  /** Create.
    * @param url The HBase url.
    * @throws LomikelException If anything goes wrong. */
  public FinkHBaseClient(String url) throws LomikelException {
    super(url);
    //setFinkEvaluatorFunctions();
  }

  /** Create on <em>localhost</em>.
    * @throws LomikelException If anything goes wrong. */
  // TBD: is it needed, does it work ok ?
  public FinkHBaseClient() throws LomikelException {
    super(null, null);
    setFinkEvaluatorFunctions();
  }
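
  /** Usage sketch (illustrative only, not part of the original API): a minimal way
    * this client might be created and queried for alerts in a Julian Date window.
    * The Zookeeper host, port, table name and jd values below are hypothetical
    * placeholders and assume a reachable Fink HBase instance. */
  private static void usageSketchJdSearch() throws LomikelException {
    FinkHBaseClient client = new FinkHBaseClient("hbase-zk.example.org", 2181); // hypothetical Zookeeper host and port
    client.connect("ztf");                                                      // hypothetical main table name
    // alerts between two Julian Dates, with row keys included in the results
    Map<String, Map<String, String>> alerts = client.search("2459000.5", "2459001.0", false, "i:objectId,i:jd", true, false);
    log.info(alerts.size() + " alerts found");
    client.close();
  }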

  /** Set up the default sets of evaluation functions. */
  private void setFinkEvaluatorFunctions() {
    try {
      evaluator().setEvaluatorFunctions("com.astrolabsoftware.FinkBrowser.HBaser.FinkEvaluatorFunctions", "com/astrolabsoftware/FinkBrowser/HBaser/FinkEvaluatorFunctions.groovy");
      evaluator().setEvaluatorFunctions(null, "com/astrolabsoftware/FinkBrowser/WebService/FinkHBaseColumnsProcessor.groovy");
    }
    catch (LomikelException e) {
      log.error("Cannot set EvaluatorFunctions", e);
    }
  }

  /** Get alerts between two Julian Dates (inclusive).
    * @param jdStart  The starting Julian Date (including the day fraction).
    * @param jdStop   The stopping Julian Date (including the day fraction).
    * @param reversed Whether results should be ordered in reverse.
    *                 <tt>true</tt> implies that result limits will be counted backwards.
    * @param filter   The names of required values as <tt>family:column,...</tt>.
    *                 It can be <tt>null</tt>.
    * @param ifkey    Whether to give also the entry keys.
    * @param iftime   Whether to give also the entry timestamps.
    * @return The {@link Map} of {@link Map}s of results as <tt>key-&gt;{family:column-&gt;value}</tt>. */
  public Map<String, Map<String, String>> search(String  jdStart,
                                                 String  jdStop,
                                                 boolean reversed,
                                                 String  filter,
                                                 boolean ifkey,
                                                 boolean iftime) {
    log.debug("Searching for alerts in jd interval: " + jdStart + " - " + jdStop);
    Map<String, String> searchMap = jd2keys(jdStart, jdStop, reversed);
    if (searchMap.isEmpty()) {
      return new TreeMap<String, Map<String, String>>();
    }
    // searching each entry separately to profit from the HBase start/stop row optimisation
    Map<String, Map<String, String>> allResults = new TreeMap<>();
    Map<String, Map<String, String>> aResult;
    for (String key : searchMap.get("key:key:exact").split(",")) {
      aResult = scan(null,
                     "key:key:" + key + ":exact",
                     filter,
                     0,
                     0,
                     ifkey,
                     iftime);
      allResults.putAll(aResult);
    }
    return allResults;
  }

  /** Get alerts within a spatial cone (inclusive).
    * @param ra     The central value of ra (in deg).
    * @param dec    The central value of dec (in deg).
    * @param delta  The maximal angular distance from the central direction (in deg).
    * @param filter The names of required values as <tt>family:column,...</tt>.
    *               It can be <tt>null</tt>.
    * @param ifkey  Whether to give also the entry keys.
    * @param iftime Whether to give also the entry timestamps.
    * @return The {@link Map} of {@link Map}s of results as <tt>key-&gt;{family:column-&gt;value}</tt>. */
  public Map<String, Map<String, String>> search(double  ra,
                                                 double  dec,
                                                 double  delta,
                                                 String  filter,
                                                 boolean ifkey,
                                                 boolean iftime) {
    log.debug("Searching for alerts within " + delta + " deg of (ra, dec) = (" + ra + ", " + dec + ")");
    Map<String, String> searchMap = radec2keys(ra, dec, delta);
    if (searchMap.isEmpty()) {
      return new TreeMap<String, Map<String, String>>();
    }
    // searching each entry separately to profit from the HBase start/stop row optimisation;
    // radec2keys supplies objectIds, which are used as row-key prefixes of the main table
    Map<String, Map<String, String>> allResults = new TreeMap<>();
    Map<String, Map<String, String>> aResult;
    for (String key : searchMap.get("key:key:prefix").split(",")) {
      aResult = scan(null,
                     "key:key:" + key + ":prefix",
                     filter,
                     0,
                     0,
                     ifkey,
                     iftime);
      allResults.putAll(aResult);
    }
    return allResults;
  }
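
  /** Usage sketch (illustrative only, not part of the original API): a cone search
    * around (ra, dec) = (10.68, 41.27) deg with a 0.1 deg radius. The connection
    * values and table name are hypothetical placeholders. */
  private static void usageSketchConeSearch() throws LomikelException {
    FinkHBaseClient client = new FinkHBaseClient("hbase-zk.example.org", 2181); // hypothetical Zookeeper host and port
    client.connect("ztf");                                                      // hypothetical main table name
    Map<String, Map<String, String>> alerts = client.search(10.68, 41.27, 0.1, "i:objectId,i:ra,i:dec", false, false);
    for (Map.Entry<String, Map<String, String>> entry : alerts.entrySet()) {
      log.info(entry.getKey() + " -> " + entry.getValue());
    }
    client.close();
  }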

  /** Give all objectIds corresponding to the specified Julian Date.
    * It uses the *.jd table.
    * @param jd       The Julian Date (with the day fraction).
    * @param reversed Whether results should be ordered in reverse.
    *                 <tt>true</tt> implies that result limits will be counted backwards.
    * @return The {@link Map} of corresponding keys of the main table,
    *         in the format expected by the scan methods. */
  public Map<String, String> jd2keys(String  jd,
                                     boolean reversed) {
    Map<String, String> searchMap = new TreeMap<>();
    try {
      HBaseClient client = new HBaseClient(zookeepers(), clientPort());
      client.connect(tableName() + ".jd");
      client.setReversed(reversed);
      client.setLimit(limit());
      client.setSearchLimit(searchLimit());
      Map<String, Map<String, String>> results = client.scan(null,
                                                             "key:key:" + jd,
                                                             null,
                                                             0,
                                                             0,
                                                             false,
                                                             false);
      // aux table keys are jd_objectId; swap the two parts to get the main table keys (objectId_jd)
      String keys = results.keySet().stream().map(m -> {String[] key = m.split("_"); return key[1] + "_" + key[0];}).collect(Collectors.joining(","));
      if (keys != null && !keys.trim().equals("")) {
        searchMap.put("key:key:exact", keys);
      }
      client.close();
    }
    catch (LomikelException e) {
      log.error("Cannot search", e);
    }
    return searchMap;
  }

  /** Give all objectIds between two specified Julian Dates (inclusive).
    * It uses the *.jd table.
    * @param jdStart  The start Julian Date (with the day fraction), evaluated as a literal prefix scan.
    * @param jdStop   The stop Julian Date (with the day fraction), evaluated as a literal prefix scan.
    * @param reversed Whether results should be ordered in reverse.
    *                 <tt>true</tt> implies that result limits will be counted backwards.
    * @return The {@link Map} of corresponding keys of the main table,
    *         in the format expected by the scan methods. */
  public Map<String, String> jd2keys(String  jdStart,
                                     String  jdStop,
                                     boolean reversed) {
    Map<String, String> searchMap = new TreeMap<>();
    try {
      HBaseClient client = new HBaseClient(zookeepers(), clientPort());
      client.connect(tableName() + ".jd");
      client.setRangeScan(true);
      client.setReversed(reversed);
      client.setLimit(limit());
      client.setSearchLimit(searchLimit());
      Map<String, Map<String, String>> results = client.scan(null,
                                                             "key:key:" + jdStart + ":prefix," + "key:key:" + jdStop + ":prefix",
                                                             null,
                                                             0,
                                                             0,
                                                             false,
                                                             false);
      // aux table keys are jd_objectId; swap the two parts to get the main table keys (objectId_jd)
      String keys = results.keySet().stream().map(m -> {String[] key = m.split("_"); return key[1] + "_" + key[0];}).collect(Collectors.joining(","));
      if (keys != null && !keys.trim().equals("")) {
        searchMap.put("key:key:exact", keys);
      }
      client.close();
    }
    catch (LomikelException e) {
      log.error("Cannot search", e);
    }
    return searchMap;
  }
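
  /** Illustrative sketch (not part of the original API) of the key convention used by
    * the two jd2keys methods above: the aux *.jd table is keyed as
    * <code>jd_objectId</code> (see createJDTable below) and the main-table key is
    * obtained by swapping the two parts. The sample key in the comment is hypothetical. */
  private static String jdKeyToMainKeySketch(String jdTableKey) {
    String[] parts = jdTableKey.split("_"); // e.g. "2459000.51_ZTF21exampleId" (hypothetical)
    return parts[1] + "_" + parts[0];       // -> "ZTF21exampleId_2459000.51"
  }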

  /** Give all objectIds within a spatial cone.
    * It uses the *.pixel table.
    * @param ra    The central value of ra/lon (in deg).
    * @param dec   The central value of dec/lat (in deg).
    * @param delta The maximal angular distance from the central direction (in deg).
    * @return The {@link Map} of corresponding keys of the main table,
    *         in the format expected by the scan methods. */
  public Map<String, String> radec2keys(double ra,
                                        double dec,
                                        double delta) {
    double coneCenterLon = Math.toRadians(ra);
    double coneCenterLat = Math.toRadians(dec);
    double coneRadiusDel = Math.toRadians(delta);
    //HealpixNestedFixedRadiusConeComputer cc = _hn.newConeComputer(coneRadiusDel); // beta code!!
    HealpixNestedFixedRadiusConeComputer cc = _hn.newConeComputerApprox(coneRadiusDel); // robust code
    HealpixNestedBMOC bmoc = cc.overlappingCenters(coneCenterLon, coneCenterLat);
    String pixs = "" + _hn.toRing(_hn.hash(coneCenterLon, coneCenterLat));
    log.debug("Central pixel: " + pixs);
    int n = 0;
    FlatHashIterator hIt = bmoc.flatHashIterator();
    //while (hIt.hasNext()) {
    //  pixs += _hn.toRing(hIt.next()) + ",";
    //  n++;
    //  }
    for (HealpixNestedBMOC.CurrentValueAccessor cell : bmoc) {
      // cell.getDepth(), cell.isFull(), cell.getRawValue()
      pixs += "," + _hn.toRing(cell.getHash());
      n++;
    }
    log.debug("" + n + " cells found (using nside = " + _NSIDE + ", depth = " + Healpix.depth(_NSIDE) + ")");
    Map<String, String> pixMap = new TreeMap<>();
    pixMap.put("key:key:prefix", pixs);
    Map<String, String> searchMap = new TreeMap<>();
    try {
      HBaseClient client = new HBaseClient(zookeepers(), clientPort());
      client.connect(tableName() + ".pixel", null);
      client.setLimit(limit());
      client.setSearchLimit(searchLimit());
      Map<String, Map<String, String>> results = client.scan(null,
                                                             pixMap,
                                                             "i:objectId",
                                                             0,
                                                             0,
                                                             false,
                                                             false);
      //log.info(results);
      String keys = results.values().stream().map(m -> m.get("i:objectId")).collect(Collectors.joining(","));
      if (keys != null && !keys.trim().equals("")) {
        searchMap.put("key:key:prefix", keys);
      }
      client.close();
    }
    catch (LomikelException e) {
      log.error("Cannot search", e);
    }
    return searchMap;
  }
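
  /** Illustrative sketch (not part of the original API): how a (ra, dec) direction
    * maps to the ring-scheme HEALPix pixel number that radec2keys above uses as the
    * central pixel of its *.pixel row-key prefix list. Nothing is read or written here. */
  private static long radec2pixelSketch(double ra, double dec) {
    // same HealpixNested instance (and thus the same depth/NSIDE) as the rest of this class
    return _hn.toRing(_hn.hash(Math.toRadians(ra), Math.toRadians(dec)));
  }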

  /** Give the timeline for the column. It makes use of the Julian Date alert time
    * instead of the HBase timestamp.
    * @param columnName The name of the column.
    * @param search     The search terms as <tt>family:column:value,...</tt>.
    *                   The key can be searched with the <tt>family:column = key:key</tt> "pseudo-name".
    *                   The {@link Comparator} can be chosen as <tt>family:column:value:comparator</tt>
    *                   among <tt>exact,prefix,substring,regex</tt>.
    *                   The default for the key is <tt>prefix</tt>,
    *                   the default for columns is <tt>substring</tt>.
    *                   It can be <tt>null</tt>.
    *                   All searches are executed as prefix searches.
    * @return The {@link Set} of {@link Pair}s of JulianDate-value. */
  @Override
  public Set<Pair<String, String>> timeline(String columnName,
                                            String search) {
    log.debug("Getting alerts timeline of " + columnName + " with " + search);
    Set<Pair<String, String>> tl = new TreeSet<>();
    Map<String, Map<String, String>> results = scan(null, search, columnName + ",i:jd", 0, false, false);
    Pair<String, String> p;
    for (Map.Entry<String, Map<String, String>> entry : results.entrySet()) {
      if (!entry.getKey().startsWith("schema")) {
        p = Pair.of(entry.getValue().get("i:jd"),
                    entry.getValue().get(columnName));
        tl.add(p);
      }
    }
    return tl;
  }

  /** Give all recent values of the column. It makes use of the Julian Date alert time
    * instead of the HBase timestamp.
    * Results are ordered by the Julian Date alert time, so eventual limits on the number
    * of results will be applied backwards in Julian Date time.
    * @param columnName  The name of the column.
    * @param prefixValue The column value prefix to search for.
    * @param minutes     How far into the past it should search (in minutes).
    * @param getValues   Whether to get column values or row keys.
    * @return The {@link Set} of different values of that column (or of row keys). */
  @Override
  public Set<String> latests(String  columnName,
                             String  prefixValue,
                             long    minutes,
                             boolean getValues) {
    log.debug("Getting " + columnName + " of alerts prefixed by " + prefixValue + " from last " + minutes + " minutes");
    Set<String> l = new TreeSet<>();
    double nowJD = DateTimeManagement.julianDate();
    double minJD = nowJD - minutes / 60.0 / 24.0; // minutes converted to days
    Map<String, Map<String, String>> results = search(String.valueOf(minJD),
                                                      String.valueOf(nowJD),
                                                      true,
                                                      columnName,
                                                      false,
                                                      false);
    for (Map.Entry<String, Map<String, String>> entry : results.entrySet()) {
      l.add(getValues ? entry.getValue().get(columnName) : entry.getKey());
    }
    return l;
  }

  /** Give all recent values of the column.
    * The original implementation from {@link HBaseClient}.
    * Results are ordered by the row key, so eventual limits on the number of results
    * will be applied to them and not to the time.
    * @param columnName  The name of the column.
    * @param prefixValue The column value prefix to search for.
    * @param minutes     How far into the past it should search (in minutes).
    * @param getValues   Whether to get column values or row keys.
    * @return The {@link Set} of different values of that column. */
  public Set<String> latestsT(String  columnName,
                              String  prefixValue,
                              long    minutes,
                              boolean getValues) {
    return super.latests(columnName, prefixValue, minutes, getValues);
  }

  /** Create the aux pixel map hash table.
    * @param keyPrefixSearch The prefix search of the row key.
    * @throws IOException      If anything goes wrong.
    * @throws LomikelException If anything goes wrong. */
  // BUG: should write numbers with schema
  public void createPixelTable(String keyPrefixSearch) throws LomikelException, IOException {
    String pixelTableName = tableName() + ".pixel";
    try {
      create(pixelTableName, new String[]{"i", "b", "d", "a"});
    }
    catch (TableExistsException e) {
      log.warn("Table " + pixelTableName + " already exists, will be reused");
    }
    HBaseClient pixelClient = new HBaseClient(zookeepers(), clientPort());
    pixelClient.connect(pixelTableName, null);
    Map<String, Map<String, String>> results = scan(null, "key:key:" + keyPrefixSearch + ":prefix", "i:objectId,i:ra,i:dec", 0, false, false);
    String objectId;
    String ra;
    String dec;
    log.debug("Writing " + pixelTableName + "...");
    int n = 0;
    for (Map.Entry<String, Map<String, String>> entry : results.entrySet()) {
      objectId = entry.getValue().get("i:objectId");
      ra       = entry.getValue().get("i:ra");
      dec      = entry.getValue().get("i:dec");
      pixelClient.put(Long.toString(_hn.hash(Math.toRadians(Double.valueOf(ra)),
                                             Math.toRadians(Double.valueOf(dec)))) + "_" + objectId,
                      new String[]{"i:ra:"       + ra,
                                   "i:dec:"      + dec,
                                   "i:objectId:" + objectId});
      System.out.print(".");
      if (n++ % 100 == 0) {
        System.out.print(n - 1);
      }
    }
    System.out.println();
    log.debug("" + n + " rows written");
    pixelClient.close();
  }
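
  /** Illustrative sketch (not part of the original API) of the row layout written into
    * the aux *.pixel table by createPixelTable above: the row key is the HEALPix nested
    * hash of the direction followed by "_" and the objectId, with the coordinates and
    * objectId repeated as columns. The return value mimics the put() arguments;
    * nothing is written to HBase here. */
  private static String[] pixelRowSketch(String objectId, double ra, double dec) {
    long pixel = _hn.hash(Math.toRadians(ra), Math.toRadians(dec)); // nested hash at the class-wide depth
    String key = pixel + "_" + objectId;                            // aux-table row key
    return new String[]{key,
                        "i:ra:"       + ra,
                        "i:dec:"      + dec,
                        "i:objectId:" + objectId};
  }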

  /** Create the aux jd map hash table.
    * @param keyPrefixSearch The prefix search of the row key.
    * @throws IOException      If anything goes wrong.
    * @throws LomikelException If anything goes wrong. */
  // BUG: should write numbers with schema
  public void createJDTable(String keyPrefixSearch) throws LomikelException, IOException {
    String jdTableName = tableName() + ".jd";
    try {
      create(jdTableName, new String[]{"i", "b", "d", "a"});
    }
    catch (TableExistsException e) {
      log.warn("Table " + jdTableName + " already exists, will be reused");
    }
    HBaseClient jdClient = new HBaseClient(zookeepers(), clientPort());
    jdClient.connect(jdTableName, null);
    Map<String, Map<String, String>> results = scan(null, "key:key:" + keyPrefixSearch + ":prefix", "i:objectId,i:jd", 0, false, false);
    String objectId;
    String jd;
    log.debug("Writing " + jdTableName + "...");
    int n = 0;
    for (Map.Entry<String, Map<String, String>> entry : results.entrySet()) {
      objectId = entry.getValue().get("i:objectId");
      jd       = entry.getValue().get("i:jd");
      jdClient.put(jd + "_" + objectId,
                   new String[]{"i:jd:"       + jd,
                                "i:objectId:" + objectId});
      System.out.print(".");
      if (n++ % 100 == 0) {
        System.out.print(n - 1);
      }
    }
    System.out.println();
    log.debug("" + n + " rows written");
    jdClient.close();
  }

  /** Assemble curves of variable columns from another table
    * as multi-versioned columns of the current table.
    * All previous curves for the selected <em>objectId</em>s are deleted.
    * @param sourceClient The {@link HBaseClient} of the source table.
    *                     It should be already opened and connected with the appropriate schema.
    * @param objectIds    The comma-separated list of <em>objectId</em>s to extract.
    * @param columns      The comma-separated list of columns (incl. families) to extract.
    * @param schemaName   The name of the schema to be created in the new table.
    *                     The columns in the new table will belong to the <em>c</em> family
    *                     and will have the type <em>double</em>. */
  public void assembleCurves(HBaseClient sourceClient,
                             String      objectIds,
                             String      columns,
                             String      schemaName) {
    String[] schema = columns.split(",");
    for (int i = 0; i < schema.length; i++) {
      schema[i] = "c:" + schema[i].split(":")[1] + ":double";
    }
    try {
      put(schemaName, schema);
    }
    catch (IOException e) {
      log.error("Cannot create schema " + schemaName + " = " + String.join(",", schema), e);
    }
    try {
      connect(tableName(), schemaName);
    }
    catch (LomikelException e) {
      log.error("Cannot reconnect to " + tableName() + " with new schema", e);
    }
    Map<String, Map<String, String>> results;
    Set<String> curves = new TreeSet<>();
    String value;
    for (String objectId : objectIds.split(",")) {
      delete(objectId);
      results = sourceClient.scan(null, "key:key:" + objectId + ":prefix", columns, 0, false, false);
      log.debug("Adding " + objectId + "[" + results.size() + "]");
      for (Map.Entry<String, Map<String, String>> row : results.entrySet()) {
        curves.clear();
        for (Map.Entry<String, String> e : row.getValue().entrySet()) {
          value = e.getValue();
          if (!value.trim().equals("NaN") && !value.trim().equals("null")) {
            curves.add("c:" + e.getKey().split(":")[1] + ":" + value.trim());
          }
        }
        try {
          if (!curves.isEmpty()) {
            put(objectId, curves.toArray(new String[0]));
          }
        }
        catch (IOException e) {
          log.error("Cannot insert " + objectId + " = " + curves, e);
        }
      }
    }
  }
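
  /** Illustrative sketch (not part of the original API) of the schema rewrite done at
    * the top of assembleCurves above: each source <em>family:column</em> becomes a
    * <em>c</em>-family column of type <em>double</em> in the assembled table.
    * The sample column names are hypothetical. */
  private static String[] curveSchemaSketch() {
    String columns = "d:mag,d:magerr";                          // hypothetical source columns
    String[] schema = columns.split(",");
    for (int i = 0; i < schema.length; i++) {
      schema[i] = "c:" + schema[i].split(":")[1] + ":double";   // -> "c:mag:double", "c:magerr:double"
    }
    return schema;
  }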

  /** Assemble lightcurves from another table
    * as multi-versioned columns of the current table.
    * All previous lightcurves for the selected <em>objectId</em>s are deleted.
    * The columns schema is embedded in this class source code.
    * @param sourceClient The {@link HBaseClient} of the source table.
    *                     It should be already opened and connected with the appropriate schema.
    * @param objectIds    The comma-separated list of <em>objectId</em>s to extract. */
  public void assembleLightCurves(HBaseClient sourceClient,
                                  String      objectIds) {
    String columns = "i:jd,d:lc_features_g,d:lc_features_r";
    String schemaName = "schema_lc_0_0_0";
    int slength = LIGHTCURVE_SCHEMA.length;
    String[] schema     = new String[2 * slength];
    String[] subcolumns = new String[2 * slength];
    for (int i = 0; i < slength; i++) {
      schema[i]               = "c:lc_g_" + LIGHTCURVE_SCHEMA[i] + ":double";
      schema[i + slength]     = "c:lc_r_" + LIGHTCURVE_SCHEMA[i] + ":double";
      subcolumns[i]           = "c:lc_g_" + LIGHTCURVE_SCHEMA[i];
      subcolumns[i + slength] = "c:lc_r_" + LIGHTCURVE_SCHEMA[i];
    }
    try {
      put(schemaName, schema);
    }
    catch (IOException e) {
      log.error("Cannot create schema " + schemaName + " = " + String.join(",", schema), e);
    }
    try {
      connect(tableName(), schemaName);
    }
    catch (LomikelException e) {
      log.error("Cannot reconnect to " + tableName() + " with new schema", e);
    }
    Map<String, Map<String, String>> results;
    Set<String> curves = new TreeSet<>();
    int i;
    for (String objectId : objectIds.split(",")) {
      delete(objectId);
      results = sourceClient.scan(null, "key:key:" + objectId + ":prefix", columns, 0, false, false);
      log.debug("Adding " + objectId + "[" + results.size() + "]");
      for (Map.Entry<String, Map<String, String>> row : results.entrySet()) {
        curves.clear();
        i = 0;
        for (Map.Entry<String, String> e : row.getValue().entrySet()) {
          if (e.getValue().contains("]")) {
            // the feature columns contain arrays like [v0, v1, ...]; unpack them into the subcolumns
            for (String value : e.getValue().replaceAll("\\[", "").replaceAll("]", "").split(",")) {
              if (!value.trim().equals("NaN") && !value.trim().equals("null")) {
                curves.add(subcolumns[i] + ":" + value.trim());
              }
              i++;
            }
          }
          else {
            curves.add("c:jd:" + e.getValue());
          }
        }
        try {
          if (!curves.isEmpty()) {
            put(objectId, curves.toArray(new String[0]));
          }
        }
        catch (IOException e) {
          log.error("Cannot insert " + objectId + " = " + curves, e);
        }
      }
    }
  }

  private static final String[] LIGHTCURVE_SCHEMA = new String[]{"lc00", "lc01", "lc02", "lc03", "lc04", "lc05", "lc06",
                                                                 "lc07", "lc08", "lc09", "lc10", "lc11", "lc12", "lc13",
                                                                 "lc14", "lc15", "lc16", "lc17", "lc18", "lc19", "lc20",
                                                                 "lc21", "lc22", "lc23", "lc24", "lc25", "lc26"};

  private static final int _NSIDE = 131072; // BUG: magic number

  private static final HealpixNested _hn = Healpix.getNested(Healpix.depth(_NSIDE));

  /** Logging. */
  private static final Logger log = LogManager.getLogger(FinkHBaseClient.class);

}