None of the configured nodes are available - java scala elasticsearch

Asked: 2018-04-17 23:39:36

Tags: java scala elasticsearch elasticsearch-plugin elasticsearch-5

I am running into an error with Elasticsearch and Scala. I have tried every answer on Stack Exchange for this problem, but nothing has worked so far.

This is the error shown in my frontend:

[NoNodeAvailableException: None of the configured nodes are available: [{#transport#-1}{ZAUtRH0eTWG6znAh5pTX0A}{localhost}{127.0.0.1:9300}]] at /home/agogorishvili/Downloads/collecter/acl2017-non-factoid-qa/Candidate-Retrieval/app/data/ElasticConnector.scala:22

def createIndex(deleteOldIndex: Boolean): Unit = {
  if (deleteOldIndex) {
    try {
      this.client.admin.indices.prepareDelete(indexName).get
    } catch {
      case e: IndexNotFoundException => // ok
    }
  }

ElasticConnector controller

package data

import java.net.InetAddress

import org.elasticsearch.common.settings.Settings
import org.elasticsearch.common.transport.InetSocketTransportAddress
import org.elasticsearch.index.IndexNotFoundException
import org.elasticsearch.index.query.QueryBuilders
import org.elasticsearch.transport.client.PreBuiltTransportClient
import play.api.libs.json._
import readers.TextItem

class ElasticConnector(host: String, port: Int, indexName: String) {

  val client = new PreBuiltTransportClient(Settings.EMPTY)
    .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(host), port))

  def createIndex(deleteOldIndex: Boolean): Unit = {
    if (deleteOldIndex) {
      try {
        this.client.admin.indices.prepareDelete(indexName).get
      } catch {
        case e: IndexNotFoundException => // ok
      }
    }

    val r = this.client.admin.indices.prepareCreate(indexName).addMapping("answer", Mapping.answersMapping)
    r.get
    ()
  }

  def queryAnswers(query: String, count: Integer): Seq[TextItem] = {
    val response = client.prepareSearch(indexName)
      .setTypes("answer")
      .setQuery(QueryBuilders.matchQuery("text", query))
      .setSize(count)
      .execute
      .actionGet

    response.getHits.hits().map { hit =>
      Json.fromJson[TextItem](Json.parse(hit.getSourceAsString)).asOpt
    }.filter(_.isDefined).map(_.get)
  }

  def saveAnswers(answers: Seq[TextItem]): Either[String, String] = {
    val bulkRequest = client.prepareBulk
    answers.foreach { answer =>
      val answerJson = Json.toJson(answer).toString()
      bulkRequest.add(client.prepareIndex(this.indexName, "answer", answer.id).setSource(answerJson))
    }
    val response = bulkRequest.get
    if (response.hasFailures) {
      Right(response.buildFailureMessage())
    } else {
      Left("Index creation successful")
    }
  }
}

private object Mapping {
  val answersMapping =
    """{
      |  "answer": {
      |    "properties": {
      |      "id": {
      |        "type": "text"
      |      },
      |      "text": {
      |        "type": "text",
      |        "similarity": "BM25"
      |      }
      |    }
      |  }
      |}""".stripMargin
}
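For reference: the client above is built with Settings.EMPTY, which, as far as I can tell, makes the transport client expect the default cluster name "elasticsearch". The small standalone sketch below is not part of the project (the object name and the cluster name are placeholders that should match cluster.name in elasticsearch.yml); it only tests the same connection with an explicit cluster name and prints which nodes the client actually managed to connect to.

import java.net.InetAddress

import org.elasticsearch.common.settings.Settings
import org.elasticsearch.common.transport.InetSocketTransportAddress
import org.elasticsearch.transport.client.PreBuiltTransportClient

object TransportProbe extends App {
  // Assumption: the node's cluster.name is "elasticsearch" (the default);
  // replace it with the value from elasticsearch.yml if it differs.
  val settings = Settings.builder()
    .put("cluster.name", "elasticsearch")
    .build()

  val client = new PreBuiltTransportClient(settings)
    .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300))

  // An empty list means the client could not reach any node on 127.0.0.1:9300,
  // which is exactly what NoNodeAvailableException reports.
  println(s"connected nodes: ${client.connectedNodes()}")
  client.close()
}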

sbt response

[info] Loading project definition from /home/agogorishvili/Downloads/collecter/acl2017-non-factoid-qa/Candidate-Retrieval/project
[info] Set current project to Candidate-Retrieval (in build file:/home/agogorishvili/Downloads/collecter/acl2017-non-factoid-qa/Candidate-Retrieval/)

--- (Running the application, auto-reloading is enabled) ---

[info] p.c.s.NettyServer - Listening for HTTP on /0:0:0:0:0:0:0:0:8000

(Server started, use Ctrl+D to stop and go back to the console...)

[success] Compiled in 437ms
[warn] application - Logger configuration in conf files is deprecated and has no effect. Use a logback configuration file instead.
[info] play.api.Play - Application started (Dev)
no modules loaded
loaded plugin [org.elasticsearch.index.reindex.ReindexPlugin]
loaded plugin [org.elasticsearch.percolator.PercolatorPlugin]
loaded plugin [org.elasticsearch.script.mustache.MustachePlugin]
loaded plugin [org.elasticsearch.transport.Netty3Plugin]
loaded plugin [org.elasticsearch.transport.Netty4Plugin]
failed to connect to node [{#transport#-1}{m3tYnWsuSpebXQCz6BYqXA}{localhost}{127.0.0.1:9300}], removed from nodes list
org.elasticsearch.transport.ConnectTransportException: [][127.0.0.1:9300] connect_timeout[30s]
    at org.elasticsearch.transport.netty4.Netty4Transport.connectToChannelsLight(Netty4Transport.java:340) ~[transport-netty4-client-5.1.1.jar:5.1.1]
    at org.elasticsearch.transport.TcpTransport.connectToNode(TcpTransport.java:410) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.transport.TcpTransport.connectToNodeLight(TcpTransport.java:382) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.transport.TransportService.connectToNodeLight(TransportService.java:303) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportClientNodesService$SimpleNodeSampler.doSample(TransportClientNodesService.java:392) [elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportClientNodesService$NodeSampler.sample(TransportClientNodesService.java:338) [elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportClientNodesService.addTransportAddresses(TransportClientNodesService.java:179) [elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportClient.addTransportAddress(TransportClient.java:301) [elasticsearch-5.1.1.jar:5.1.1]
    at data.ElasticConnector.<init>(ElasticConnector.scala:16) [classes/:?]
    at controllers.Application.elastic$lzycompute(Application.scala:19) [classes/:?]
    at controllers.Application.elastic(Application.scala:19) [classes/:?]
    at controllers.Application$$anonfun$createIndex$1.apply(Application.scala:32) [classes/:?]
    at controllers.Application$$anonfun$createIndex$1.apply(Application.scala:25) [classes/:?]
    at play.api.mvc.ActionBuilder$$anonfun$apply$17.apply(Action.scala:439) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.ActionBuilder$$anonfun$apply$17.apply(Action.scala:439) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.ActionBuilder$$anonfun$apply$16.apply(Action.scala:408) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.ActionBuilder$$anonfun$apply$16.apply(Action.scala:407) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$.invokeBlock(Action.scala:533) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$.invokeBlock(Action.scala:530) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.ActionBuilder$$anon$1.apply(Action.scala:493) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4$$anonfun$apply$5.apply(Action.scala:105) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4$$anonfun$apply$5.apply(Action.scala:105) [play_2.11-2.4.8.jar:2.4.8]
    at play.utils.Threads$.withContextClassLoader(Threads.scala:21) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4.apply(Action.scala:104) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4.apply(Action.scala:103) [play_2.11-2.4.8.jar:2.4.8]
    at scala.Option.map(Option.scala:146) [scala-library-2.11.7.jar:?]
    at play.api.mvc.Action$$anonfun$apply$1.apply(Action.scala:103) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.mvc.Action$$anonfun$apply$1.apply(Action.scala:96) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.libs.iteratee.DoneIteratee$$anonfun$mapM$2.apply(Iteratee.scala:741) [play-iteratees_2.11-2.4.8.jar:2.4.8]
    at play.api.libs.iteratee.DoneIteratee$$anonfun$mapM$2.apply(Iteratee.scala:741) [play-iteratees_2.11-2.4.8.jar:2.4.8]
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24) [scala-library-2.11.7.jar:?]
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24) [scala-library-2.11.7.jar:?]
    at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40) [akka-actor_2.11-2.3.13.jar:?]
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:397) [akka-actor_2.11-2.3.13.jar:?]
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) [scala-library-2.11.7.jar:?]
    at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) [scala-library-2.11.7.jar:?]
    at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) [scala-library-2.11.7.jar:?]
    at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) [scala-library-2.11.7.jar:?]
Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: Connection refused: localhost/127.0.0.1:9300
    at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) ~[?:1.8.0_162]
    at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) ~[?:1.8.0_162]
    at io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:346) ~[netty-transport-4.1.6.Final.jar:4.1.6.Final]
    at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:340) ~[netty-transport-4.1.6.Final.jar:4.1.6.Final]
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:639) ~[netty-transport-4.1.6.Final.jar:4.1.6.Final]
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:574) ~[netty-transport-4.1.6.Final.jar:4.1.6.Final]
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:488) ~[netty-transport-4.1.6.Final.jar:4.1.6.Final]
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:450) ~[netty-transport-4.1.6.Final.jar:4.1.6.Final]
    at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:873) ~[netty-common-4.1.6.Final.jar:4.1.6.Final]
    at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_162]
[error] application - 

! @77ja5oj76 - Internal server error, for (GET) [/create-index] ->

play.api.http.HttpErrorHandlerExceptions$$anon$1: Execution exception[[NoNodeAvailableException: None of the configured nodes are available: [{#transport#-1}{m3tYnWsuSpebXQCz6BYqXA}{localhost}{127.0.0.1:9300}]]]
    at play.api.http.HttpErrorHandlerExceptions$.throwableToUsefulException(HttpErrorHandler.scala:265) ~[play_2.11-2.4.8.jar:2.4.8]
    at play.api.http.DefaultHttpErrorHandler.onServerError(HttpErrorHandler.scala:191) ~[play_2.11-2.4.8.jar:2.4.8]
    at play.api.GlobalSettings$class.onError(GlobalSettings.scala:179) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.DefaultGlobal$.onError(GlobalSettings.scala:212) [play_2.11-2.4.8.jar:2.4.8]
    at play.api.http.GlobalSettingsHttpErrorHandler.onServerError(HttpErrorHandler.scala:94) [play_2.11-2.4.8.jar:2.4.8]
    at play.core.server.netty.PlayDefaultUpstreamHandler$$anonfun$9$$anonfun$apply$1.applyOrElse(PlayDefaultUpstreamHandler.scala:162) [play-netty-server_2.11-2.4.8.jar:2.4.8]
    at play.core.server.netty.PlayDefaultUpstreamHandler$$anonfun$9$$anonfun$apply$1.applyOrElse(PlayDefaultUpstreamHandler.scala:159) [play-netty-server_2.11-2.4.8.jar:2.4.8]
    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36) [scala-library-2.11.7.jar:na]
    at scala.util.Failure$$anonfun$recover$1.apply(Try.scala:216) [scala-library-2.11.7.jar:na]
    at scala.util.Try$.apply(Try.scala:192) [scala-library-2.11.7.jar:na]
Caused by: org.elasticsearch.client.transport.NoNodeAvailableException: None of the configured nodes are available: [{#transport#-1}{m3tYnWsuSpebXQCz6BYqXA}{localhost}{127.0.0.1:9300}]
    at org.elasticsearch.client.transport.TransportClientNodesService.ensureNodesAreAvailable(TransportClientNodesService.java:328) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportClientNodesService.execute(TransportClientNodesService.java:226) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportProxyClient.execute(TransportProxyClient.java:59) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.transport.TransportClient.doExecute(TransportClient.java:345) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.support.AbstractClient.execute(AbstractClient.java:403) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.client.support.AbstractClient$IndicesAdmin.execute(AbstractClient.java:1226) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:80) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:54) ~[elasticsearch-5.1.1.jar:5.1.1]
    at org.elasticsearch.action.ActionRequestBuilder.get(ActionRequestBuilder.java:62) ~[elasticsearch-5.1.1.jar:5.1.1]
    at data.ElasticConnector.createIndex(ElasticConnector.scala:22) ~[classes/:na]
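The deepest cause in this trace is "Connection refused: localhost/127.0.0.1:9300", i.e. nothing accepted a TCP connection on the transport port (9300 is the port used by the Java TransportClient; the REST API listens on 9200 by default). A plain socket probe like the hypothetical helper below (not project code) would confirm whether a node is listening there at all, independent of any Elasticsearch settings.

import java.net.{InetSocketAddress, Socket}

object PortProbe extends App {
  // Hypothetical helper: returns true if something accepts a TCP
  // connection on host:port within the given timeout.
  def canConnect(host: String, port: Int, timeoutMs: Int = 2000): Boolean = {
    val socket = new Socket()
    try {
      socket.connect(new InetSocketAddress(host, port), timeoutMs)
      true
    } catch {
      case _: java.io.IOException => false
    } finally {
      socket.close()
    }
  }

  println(s"9200 (REST) reachable:      ${canConnect("localhost", 9200)}")
  println(s"9300 (transport) reachable: ${canConnect("localhost", 9300)}")
}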

scala app

package controllers

import javax.inject.Inject

import data.ElasticConnector
import play.api.Configuration
import play.api.libs.json.{JsArray, JsObject, JsString}
import play.api.mvc._
import data.readers.{DataReaderRepository}
import data.writers.TSVArchiveWriter
import scala.collection.mutable
import play.api.Logger

class Application @Inject()(configuration: play.api.Configuration) extends Controller {

  lazy val indexName = configuration.getString("retrieval.elastic.indexName").get
  lazy val elasticHost = configuration.getString("retrieval.elastic.host").get
  lazy val elasticPort = configuration.getInt("retrieval.elastic.port").get
  lazy val elastic = new ElasticConnector(elasticHost, elasticPort, indexName)

  def index = Action {
    Ok("The application is running")
  }

  def createIndex = Action {
    val reader = DataReaderRepository.getReader(configuration.getString("retrieval.dataset.type").get)
    val datasetPath = configuration.getString("retrieval.dataset.path").get
    val datasetOptions = configuration.getConfig("retrieval.dataset.options").getOrElse(Configuration.empty)

    val answers = reader.readAnswers(datasetPath, datasetOptions)

    elastic.createIndex(true)
    elastic.saveAnswers(answers)

    Ok("Index created")
  }

  def queryIndex(q: String, n: Int) = Action {
    Ok(
      JsObject(Seq(
        "candidates" -> JsArray(elastic.queryAnswers(q, n).map(a => JsString(a.text)))
      ))
    )
  }

  def writeTSVArchive = Action {
    // TODO use a streaming response instead of log messages
    Logger.info("Writing TSV Archive")
    val datasetPath = configuration.getString("retrieval.dataset.path").get
    val datasetOptions = configuration.getConfig("retrieval.dataset.options").getOrElse(Configuration.empty)

    Logger.info("Reading source dataset")
    val reader = DataReaderRepository.getReader(configuration.getString("retrieval.dataset.type").get)

    val questionTexts = mutable.Map[String, String]()
    val answerTexts = mutable.Map[String, String]()
    val qas = reader.readQA(datasetPath, datasetOptions)
    val qasMap = qas.map(qa => qa.question.id -> qa).toMap

    // construct all the pools
    Logger.info("Constructing pools")
    val pools = qas.zipWithIndex.map { case (qa, idx) =>
      if (idx % 100 == 0) {
        Logger.info(s"$idx/${qas.length}")
      }
      val questionText = qa.question.text.take(5000)
      val pool = elastic.queryAnswers(questionText, 100)
      questionTexts(qa.question.id) = questionText
      (pool ++ qa.groundTruth).foreach { a =>
        answerTexts(a.id) = a.text
      }
      (qa.question.id, qa.groundTruth.map(_.id), pool.map(_.id))
    }

    // write the files
    val trainSize = configuration.getDouble("tsvWriter.split.train").get
    val validSize = configuration.getDouble("tsvWriter.split.valid").get

    Logger.info("Writing archive")
    val writer = new TSVArchiveWriter(configuration.getString("tsvWriter.path").get)
    writer.write(questionTexts.toMap, answerTexts.toMap, pools, trainSize, validSize)

    Logger.info("Done")
    Ok("Dataset created")
  }

}
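One detail the stack trace makes visible: because elastic is a lazy val, the ElasticConnector (and with it the PreBuiltTransportClient) is only constructed on the first request that touches it, which is why the failure appears at GET /create-index via Application.elastic$lzycompute rather than at application startup. A tiny illustration of that behavior (hypothetical class, not project code):

// Minimal sketch of lazy initialization in Scala.
class LazyDemo {
  lazy val connector: String = {
    println("constructing connector now") // runs on first access, not at construction
    "connected"
  }
}

object LazyDemoApp extends App {
  val demo = new LazyDemo() // prints nothing: connector is not built yet
  demo.connector            // prints "constructing connector now"
}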

application.conf

# This is the main configuration file for the application.
# ~~~~~


# Dataset configuration
# ~~~~~

retrieval {
  elastic {
    host = "localhost"
    port = 9300
    indexName = "insuranceqa"
  }

  dataset {
    // can be either insuranceqav1, insuranceqav2, or stackexchange
    type = "insuranceqav2"

    // path to the insuranceQA dataset or the stackexchange archive downloaded from
    // https://archive.org/details/stackexchange
    path = "/home/agogorishvili/Downloads/collecter/acl2017-non-factoid-qa/insuranceQA"

    // dataset options
    options {

      // for insuranceQA v2 we can specify which poolsize we want (100, 500, ...) and which tokenized version should be
      // used. See the official dataset description for more details.
      // pooledAnswers = 500
      // tokenizer = "token"

      // for stackexchange, we can choose to only include answers with a certain score (or greater)
      // minScore = 1

    }

  }

}


// This section defines options for the stackexchange dataset creation (TSV). The format was inspired by the
// InsuranceQA.
tsvWriter {
  // Output folder
  path = "<...>/Data/writer-dataset"

  // the split ratios
  split {
    train = 0.7
    valid = 0.1
    test = 0.2
  }
}


# Play framework configuration
# ~~~~~

# Secret key
# ~~~~~
# The secret key is used to secure cryptographic functions.
# If you deploy your application to several instances be sure to use the same key!
application.secret = "%APPLICATION_SECRET%"

# The application languages
# ~~~~~
application.langs = "en"

# Global object class
# ~~~~~
# Define the Global object class for this application.
# Default to Global in the root package.
# application.global=Global

# Router
# ~~~~~
# Define the Router object to use for this application.
# This router will be looked up first when the application is starting up,
# so make sure this is the entry point.
# Furthermore, it's assumed your route file is named properly.
# So for an application router like `my.application.Router`,
# you may need to define a router file `conf/my.application.routes`.
# Default to Routes in the root package (and conf/routes)
# application.router=my.application.Routes

# Database configuration
# ~~~~~
# You can declare as many datasources as you want.
# By convention, the default datasource is named `default`
#
# db.default.driver=org.h2.Driver
# db.default.url="jdbc:h2:mem:play"
# db.default.user=sa
# db.default.password=""

# Evolutions
# ~~~~~
# You can disable evolutions if needed
# evolutionplugin=disabled

# Logger
# ~~~~~
# You can also configure logback (http://logback.qos.ch/),
# by providing an application-logger.xml file in the conf directory.

# Root logger:
logger.root = ERROR

# Logger used by the framework:
logger.play = INFO

# Logger provided to your application:
logger.application = DEBUG

0 Answers:

No answers