Package java.io

Examples of java.io.DataOutput
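
DataOutput is the interface implemented by DataOutputStream and RandomAccessFile: it declares writeByte, writeInt, writeLong, writeUTF and the other primitive-writing methods, and multi-byte values are emitted high byte first (big-endian). As a minimal, self-contained sketch before the project excerpts below (class and value names here are illustrative only), the following writes a few values to an in-memory DataOutput and reads them back with the matching DataInput calls; the first excerpt after it applies the same round trip to an NBT compound.

    import java.io.*;

    public class DataOutputRoundTrip {
        public static void main(String[] args) throws IOException {
            // Write primitives through the DataOutput interface into an in-memory sink.
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            DataOutput out = new DataOutputStream(buffer);
            out.writeInt(42);
            out.writeLong(System.currentTimeMillis());
            out.writeUTF("nickname");            // 2-byte length prefix + modified UTF-8

            // Read the values back in the same order with the matching DataInput methods.
            DataInput in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
            System.out.println(in.readInt());    // 42
            System.out.println(in.readLong());
            System.out.println(in.readUTF());    // "nickname"
        }
    }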


    compound.put("age", 42);
   
    compound.put(NbtFactory.ofList("nicknames", "a", "b", "c"));

    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutput test = new DataOutputStream(buffer);
    compound.write(test);

    ByteArrayInputStream source = new ByteArrayInputStream(buffer.toByteArray());
    DataInput input = new DataInputStream(source);
   
View Full Code Here


            try {
                socket = new Socket(address, syncPort);
                LOG.info("sync connected to " + socket.getInetAddress().getHostAddress() + " port " + socket.getLocalPort());

                final CRC32 crc32 = new CRC32();
                final DataOutput output = new DataOutputStream(new CheckedOutputStream(socket.getOutputStream(), crc32));
                final DataInput input = new DataInputStream(socket.getInputStream());
                output.writeByte(INIT);
                long logId = input.readLong();
                do {
                    final long nextLogId = logId + 1;
                    final File file = Util.logFile(nextLogId);
                    if (file.exists() && server.getLogger().isWritten(nextLogId)) {
                        logId++;

                        output.writeByte(RECOVERY_LOG);
                        crc32.reset();
                        output.writeLong(logId);

                        LOG.info("sending recovery file: " + file.getName());
                        final BufferedInputStream fileInput = new BufferedInputStream(new FileInputStream(file));

                        final byte[] buffer = new byte[8092];
                        int read;
                        while ((read = fileInput.read(buffer)) > 0) {
                            output.writeInt(read);
                            output.write(buffer, 0, read);
                        }
                        output.writeInt(0);

                        output.writeLong(crc32.getValue());
                    }
                    try {
                        Thread.sleep(300);
                    } catch (final InterruptedException ignore) {
                    }
View Full Code Here

    }

    private void syncConnection(final Socket connection, final int readTimeout) {
        try {
            final CRC32 crc32 = new CRC32();
            final DataOutput output = new DataOutputStream(connection.getOutputStream());
            final DataInput input = new DataInputStream(new CheckedInputStream(connection.getInputStream(), crc32));

            if (input.readByte() != INIT) {
                return;
            }

            final LogRange logFileRange = Util.logFileRange();
            final long lastId = logFileRange.noLogFile() ? -1 : logFileRange.getLast();
            output.writeLong(lastId);
            do {
                if (input.readByte() != RECOVERY_LOG) {
                    return;
                }
                crc32.reset();
View Full Code Here
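
The two excerpts above are the sending and receiving halves of the same recovery protocol. The sender wraps the socket's output stream in a java.util.zip.CheckedOutputStream, so every byte written through the DataOutput also updates a CRC32, and it appends crc32.getValue() after the length-prefixed file chunks; the receiver mirrors this with a CheckedInputStream beneath its DataInput and can compare checksums. A stripped-down sketch of that checksum framing, with names invented for illustration rather than taken from the project above:

    import java.io.*;
    import java.util.zip.CRC32;
    import java.util.zip.CheckedOutputStream;

    public class CheckedFrameWriter {
        // Frame a payload as: int length, payload bytes, long CRC32 of the length and payload.
        static void writeCheckedFrame(OutputStream raw, byte[] payload) throws IOException {
            CRC32 crc32 = new CRC32();
            DataOutput out = new DataOutputStream(new CheckedOutputStream(raw, crc32));
            crc32.reset();                       // checksum covers only this frame
            out.writeInt(payload.length);
            out.write(payload);
            out.writeLong(crc32.getValue());     // getValue() is captured before these
        }                                        // eight bytes pass through the checked stream
    }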

                dataOut.writeByte(type);
                bs.marshal(dataOut);
                dsm.tightMarshal2(this, c, dataOut, bs);

            } else {
                DataOutput looseOut = dataOut;

                if (!sizePrefixDisabled) {
                    bytesOut.restart();
                    looseOut = bytesOut;
                }

                looseOut.writeByte(type);
                dsm.looseMarshal(this, c, looseOut);

                if (!sizePrefixDisabled) {
                    ByteSequence sequence = bytesOut.toByteSequence();
                    dataOut.writeInt(sequence.getLength());
View Full Code Here
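
In the excerpt above, the loose-marshal path first writes the frame into an intermediate buffer (bytesOut) whenever the size prefix is enabled, because the total length has to be written with writeInt before the frame bytes themselves. The same idea using only standard java.io types, as a hedged sketch rather than the original project's implementation:

    import java.io.*;

    public class SizePrefixedWriter {
        // Marshal into a scratch buffer first so the length is known,
        // then emit writeInt(length) followed by the buffered bytes.
        static void writeFrame(DataOutput dataOut, byte type, byte[] body) throws IOException {
            ByteArrayOutputStream scratch = new ByteArrayOutputStream();
            DataOutputStream looseOut = new DataOutputStream(scratch);
            looseOut.writeByte(type);
            looseOut.write(body);
            looseOut.flush();

            dataOut.writeInt(scratch.size());    // size prefix
            dataOut.write(scratch.toByteArray());
        }
    }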

      // write the root level
      long rootLevelIndexPos = out.getPos();

      {
        DataOutput blockStream =
            blockWriter.startWriting(BlockType.ROOT_INDEX);
        rootChunk.writeRoot(blockStream);
        if (midKeyMetadata != null)
          blockStream.write(midKeyMetadata);
        blockWriter.writeHeaderAndData(out);
      }

      // Add root index block size
      totalBlockOnDiskSize += blockWriter.getOnDiskSizeWithoutHeader();
View Full Code Here

    f.delete();
    f.createNewFile();
    f.deleteOnExit();

    OutputStream fileOutStream = new FileOutputStream(f);
    DataOutput outStream = new DataOutputStream(fileOutStream);

    HCatRecord[]  recs = getHCatRecords();
    for (int i = 0; i < recs.length; i++) {
      recs[i].write(outStream);
    }
View Full Code Here
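
Here each HCatRecord serializes itself with record.write(outStream), the same convention as Hadoop's Writable interface: the object writes its own fields to a DataOutput and rebuilds them from a DataInput in the identical order. A minimal record in that style, with no Hadoop dependency and with field names made up for illustration:

    import java.io.*;

    // A self-serializing record in the Writable style: write(DataOutput) and
    // readFields(DataInput) must agree on field order and types.
    public class PersonRecord {
        private String name;
        private int age;

        public void write(DataOutput out) throws IOException {
            out.writeUTF(name);
            out.writeInt(age);
        }

        public void readFields(DataInput in) throws IOException {
            name = in.readUTF();
            age = in.readInt();
        }
    }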

  private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
    try {
      Path resFile = new Path(showGrantDesc.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      DataOutput outStream = fs.create(resFile);
      PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
      PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
      String principalName = principalDesc.getName();
      if (hiveObjectDesc == null) {
        List<HiveObjectPrivilege> users = db.showPrivilegeGrant(
            HiveObjectType.GLOBAL, principalName, principalDesc.getType(),
            null, null, null, null);
        if (users != null && users.size() > 0) {
          boolean first = true;
          for (HiveObjectPrivilege usr : users) {
            if (!first) {
              outStream.write(terminator);
            } else {
              first = false;
            }

            writeGrantInfo(outStream, principalDesc.getType(), principalName,
                null, null, null, null, usr.getGrantInfo());

          }
        }
      } else {
        String obj = hiveObjectDesc.getObject();
        boolean notFound = true;
        String dbName = null;
        String tableName = null;
        Table tableObj = null;
        Database dbObj = null;

        if (hiveObjectDesc.getTable()) {
          String[] dbTab = obj.split("\\.");
          if (dbTab.length == 2) {
            dbName = dbTab[0];
            tableName = dbTab[1];
          } else {
            dbName = db.getCurrentDatabase();
            tableName = obj;
          }
          dbObj = db.getDatabase(dbName);
          tableObj = db.getTable(dbName, tableName);
          notFound = (dbObj == null || tableObj == null);
        } else {
          dbName = hiveObjectDesc.getObject();
          dbObj = db.getDatabase(dbName);
          notFound = (dbObj == null);
        }
        if (notFound) {
          throw new HiveException(obj + " can not be found");
        }

        String partName = null;
        List<String> partValues = null;
        if (hiveObjectDesc.getPartSpec() != null) {
          partName = Warehouse
              .makePartName(hiveObjectDesc.getPartSpec(), false);
          partValues = Warehouse.getPartValuesFromPartName(partName);
        }

        if (!hiveObjectDesc.getTable()) {
          // show database level privileges
          List<HiveObjectPrivilege> dbs = db.showPrivilegeGrant(HiveObjectType.DATABASE, principalName,
              principalDesc.getType(), dbName, null, null, null);
          if (dbs != null && dbs.size() > 0) {
            boolean first = true;
            for (HiveObjectPrivilege db : dbs) {
              if (!first) {
                outStream.write(terminator);
              } else {
                first = false;
              }

              writeGrantInfo(outStream, principalDesc.getType(), principalName,
                  dbName, null, null, null, db.getGrantInfo());

            }
          }

        } else {
          if (showGrantDesc.getColumns() != null) {
            // show column level privileges
            for (String columnName : showGrantDesc.getColumns()) {
              List<HiveObjectPrivilege> columnss = db.showPrivilegeGrant(
                  HiveObjectType.COLUMN, principalName,
                  principalDesc.getType(), dbName, tableName, partValues,
                  columnName);
              if (columnss != null && columnss.size() > 0) {
                boolean first = true;
                for (HiveObjectPrivilege col : columnss) {
                  if (!first) {
                    outStream.write(terminator);
                  } else {
                    first = false;
                  }

                  writeGrantInfo(outStream, principalDesc.getType(),
                      principalName, dbName, tableName, partName, columnName,
                      col.getGrantInfo());
                }
              }
            }
          } else if (hiveObjectDesc.getPartSpec() != null) {
            // show partition level privileges
            List<HiveObjectPrivilege> parts = db.showPrivilegeGrant(
                HiveObjectType.PARTITION, principalName, principalDesc
                    .getType(), dbName, tableName, partValues, null);
            if (parts != null && parts.size() > 0) {
              boolean first = true;
              for (HiveObjectPrivilege part : parts) {
                if (!first) {
                  outStream.write(terminator);
                } else {
                  first = false;
                }

                writeGrantInfo(outStream, principalDesc.getType(),
                    principalName, dbName, tableName, partName, null, part.getGrantInfo());

              }
            }
          } else {
            // show table level privileges
            List<HiveObjectPrivilege> tbls = db.showPrivilegeGrant(
                HiveObjectType.TABLE, principalName, principalDesc.getType(),
                dbName, tableName, null, null);
            if (tbls != null && tbls.size() > 0) {
              boolean first = true;
              for (HiveObjectPrivilege tbl : tbls) {
                if (!first) {
                  outStream.write(terminator);
                } else {
                  first = false;
                }

                writeGrantInfo(outStream, principalDesc.getType(),
View Full Code Here
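
In this and the remaining excerpts the DataOutput comes from fs.create(resFile), which returns a Hadoop FSDataOutputStream (a DataOutputStream subclass); that is why the code can write rows with writeBytes, separate them with a terminator byte, and close the stream by casting back to FSDataOutputStream. Stripped of the privilege lookups, the row-writing loop follows a pattern like this sketch against plain java.io (the terminator value is an assumption, not taken from the code above):

    import java.io.*;
    import java.util.List;

    public class ResultFileWriter {
        private static final byte terminator = '\n';   // assumed row separator

        // Write one row per entry, putting a terminator byte between rows,
        // mirroring the writeBytes(...) / write(terminator) loop above.
        static void writeRows(DataOutput outStream, List<String> rows) throws IOException {
            boolean first = true;
            for (String row : rows) {
                if (!first) {
                    outStream.write(terminator);
                } else {
                    first = false;
                }
                outStream.writeBytes(row);
            }
        }
    }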

        List<Role> roles = db.showRoleGrant(roleDDLDesc.getName(), roleDDLDesc
            .getPrincipalType());
        if (roles != null && roles.size() > 0) {
          Path resFile = new Path(roleDDLDesc.getResFile());
          FileSystem fs = resFile.getFileSystem(conf);
          DataOutput outStream = fs.create(resFile);
          for (Role role : roles) {
            outStream.writeBytes("role name:" + role.getRoleName());
            outStream.write(terminator);
          }
          ((FSDataOutputStream) outStream).close();
        }
      } else {
        throw new HiveException("Unkown role operation "
View Full Code Here

    // write the results in the file
    try {
      Path resFile = new Path(showParts.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      DataOutput outStream = fs.create(resFile);
      Iterator<String> iterParts = parts.iterator();

      while (iterParts.hasNext()) {
        // create a row per partition name
        outStream.writeBytes(iterParts.next());
        outStream.write(terminator);
      }
      ((FSDataOutputStream) outStream).close();
    } catch (FileNotFoundException e) {
      LOG.info("show partitions: " + stringifyException(e));
      throw new HiveException(e.toString());
View Full Code Here

    // write the results in the file
    try {
      Path resFile = new Path(showIndexes.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      DataOutput outStream = fs.create(resFile);

      if (showIndexes.isFormatted()) {
        // column headers
        outStream.writeBytes(MetaDataFormatUtils.getIndexColumnsHeader());
        outStream.write(terminator);
        outStream.write(terminator);
      }

      for (Index index : indexes)
      {
        outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(index));
      }

      ((FSDataOutputStream) outStream).close();

    } catch (FileNotFoundException e) {
View Full Code Here
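
One caveat about the writeBytes calls used throughout these result-file excerpts: DataOutput.writeBytes(String) writes only the low eight bits of each character and silently drops the high byte, so it is safe only for ASCII (or Latin-1) content, while writeUTF(String) writes a two-byte length followed by modified UTF-8 and pairs with DataInput.readUTF. A small sketch of the difference in what actually lands in the stream:

    import java.io.*;

    public class WriteBytesVsWriteUTF {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            DataOutput out = new DataOutputStream(buffer);

            out.writeBytes("héllo");   // 5 bytes: low 8 bits of each char ('é' becomes 0xE9)
            out.writeUTF("héllo");     // 2-byte length + 6 bytes of modified UTF-8

            System.out.println(buffer.size());   // 5 + 8 = 13
        }
    }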
