package com.ourhome.flink.test

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.Logger

import scala.io.Source

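// Minimal test driver: reads lines from a local text file and publishes each one
// to the "kafka_producer_test" topic, printing the outcome of every send.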
object kafkaProducer {
  private val KAFKA_BROKERS = "host1:port1,host2:port2,host3:port3"
  private val logger = Logger.getLogger(this.getClass)

  def main(args: Array[String]): Unit = {
    val kfkProperties: Properties = new Properties()

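    // Broker addresses and String serializers for record keys and values.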
    kfkProperties.setProperty("bootstrap.servers", KAFKA_BROKERS)
    kfkProperties.setProperty("key.serializer", classOf[StringSerializer].getName)
    kfkProperties.setProperty("value.serializer", classOf[StringSerializer].getName)
    kfkProperties.setProperty("acks", "1")
    kfkProperties.setProperty("retries", "3")
    kfkProperties.setProperty("batch.size", "16384")

    val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](kfkProperties)
    // Read test messages from a local file; update the path for your environment.
    val source = Source.fromFile("C:\\Users\\10244615\\Desktop\\hardcodecscan\\file.txt")
    val lines: Iterator[String] = source.getLines()
    lines.foreach(
      line => {
        println(line)
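        // No key is supplied, so partition assignment is left to the producer's default partitioner.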
        val record: ProducerRecord[String, String] = new ProducerRecord[String, String]("kafka_producer_test", line)
        producer.send(record, new Callback {
          override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit = {
            // A null exception means the broker acknowledged the record; a non-null
            // metadata object alone is not a reliable success indicator.
            if (exception == null) {
              println("Send succeeded")
              println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date))
            } else {
              println("Message send failed: " + exception.getMessage)
            }
          }
        })
      }
    )
    source.close()
    // close() flushes any records still buffered in the producer before shutting it down.
    producer.close()
  }
}