Remove some unnecessary uses of implicitly with Factory (#12559)

CHANGELOG_BEGIN
CHANGELOG_END
Remy 2022-01-26 14:28:26 +01:00 committed by GitHub
parent 9c3f1ce93a
commit 5cdf9b95fb
13 changed files with 14 additions and 37 deletions


@@ -22,7 +22,6 @@ import Value._
import com.daml.scalautil.Statement.discard
import scala.annotation.tailrec
import scala.collection.Factory
import scala.collection.immutable
/** An in-memory representation of a ledger for scenarios */
@@ -308,8 +307,7 @@ object ScenarioLedger {
/** Collect all contract ids appearing in a value
*/
def collectCoids(value: Value): Set[ContractId] = {
val coids =
implicitly[Factory[ContractId, Set[ContractId]]].newBuilder
val coids = Set.newBuilder[ContractId]
def collect(v: Value): Unit =
v match {
case ValueRecord(tycon @ _, fs) =>
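For context: both spellings in the hunk above end up with a builder for an immutable Set, so the shorter Set.newBuilder form changes nothing observable. A minimal standalone sketch of the equivalence, with plain Int elements standing in for ContractId:

import scala.collection.Factory

// Sketch only: both expressions below yield an immutable Set builder.
val viaImplicitly = implicitly[Factory[Int, Set[Int]]].newBuilder
viaImplicitly ++= Seq(1, 2, 2, 3)

val viaCompanion = Set.newBuilder[Int]
viaCompanion ++= Seq(1, 2, 2, 3)

assert(viaImplicitly.result() == viaCompanion.result()) // Set(1, 2, 3)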


@@ -17,7 +17,6 @@ import com.daml.scalautil.Statement.discard
import com.daml.nameof.NameOf
import scala.jdk.CollectionConverters._
import scala.collection.Factory
import scala.collection.immutable.TreeMap
import scala.util.hashing.MurmurHash3
@@ -228,9 +227,7 @@ object SValue {
def apply(isTextMap: Boolean, entries: Iterator[(SValue, SValue)]): SMap = {
SMap(
isTextMap,
implicitly[Factory[(SValue, SValue), TreeMap[SValue, SValue]]].fromSpecific(entries.map {
case p @ (k, _) => comparable(k); p
}),
entries.map { case p @ (k, _) => comparable(k); p }.to(TreeMap),
)
}
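Similarly, .to(TreeMap) is the idiomatic spelling of Factory#fromSpecific for a sorted map; either form needs an Ordering on the key type in implicit scope (SValue in the real code, String in this illustrative sketch):

import scala.collection.Factory
import scala.collection.immutable.TreeMap

// Sketch only: both spellings build the same sorted map.
val entries = Seq("b" -> 2, "a" -> 1)

val viaImplicitly =
  implicitly[Factory[(String, Int), TreeMap[String, Int]]].fromSpecific(entries.iterator)
val viaTo: TreeMap[String, Int] = entries.iterator.to(TreeMap)

assert(viaImplicitly == viaTo) // TreeMap(a -> 1, b -> 2)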


@@ -21,7 +21,6 @@ import iface.{
Variant,
PrimType => PT,
}
import scala.collection.Factory
import scalaz.{@@, Order, Ordering, Tag}
import scalaz.syntax.bitraverse._
import scalaz.syntax.traverse._
@@ -243,10 +242,7 @@ object TypedValueGenerators {
override def inj(hl: Inj) =
ValueRecord(
Some(name),
implicitly[
Factory[(Some[Ref.Name], Value), ImmArray[(Some[Ref.Name], Value)]]
]
.fromSpecific(lfvFieldNames zip spec.injRec(hl)),
(lfvFieldNames zip spec.injRec(hl)).to(ImmArray),
)
override def prj = {
case ValueRecord(_, fields) if fields.length == spec.t.length =>


@@ -15,8 +15,7 @@ import scalaz.syntax.std.boolean._
import scalaz.syntax.tag._
import scala.annotation.nowarn
import scala.collection.Factory
import scala.collection.{mutable, immutable => imm}
import scala.collection.{Factory, mutable, immutable => imm}
sealed abstract class Primitive extends PrimitiveInstances {
type Int64 = Long


@@ -12,7 +12,6 @@ import scalaz.std.option._
import scalaz.syntax.traverse._
import scala.annotation.tailrec
import scala.collection.Factory
import scala.{specialized => sp}
sealed trait DamlCodecs // always include `object DamlCodecs` in implicit search
@@ -118,7 +117,7 @@ object DamlCodecs extends encoding.ValuePrimitiveEncoding[Value] {
private[this] def seqAlterTraverse[A, B, That](
xs: Iterable[A]
)(f: A => Option[B])(implicit factory: Factory[B, That]): Option[That] = {
)(f: A => Option[B])(implicit factory: collection.Factory[B, That]): Option[That] = {
val bs = factory.newBuilder
val i = xs.iterator
@tailrec def go(): Option[That] =
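As a side note, writing collection.Factory in the signature works without any import because the scala.collection package is always reachable through the auto-imported scala package. A hedged sketch of a helper with the same shape; all names are made up for illustration:

// Sketch only: an implicit-Factory helper written without importing
// scala.collection.Factory.
def traverseOption[A, B, That](xs: Iterable[A])(f: A => Option[B])(implicit
    factory: collection.Factory[B, That]
): Option[That] = {
  val b = factory.newBuilder
  // Stop at the first None; otherwise collect every produced value.
  val allDefined = xs.forall(a => f(a).exists { v => b += v; true })
  if (allDefined) Some(b.result()) else None
}

val parsed: Option[Vector[Int]] = traverseOption(List("1", "2", "3"))(_.toIntOption)
// parsed == Some(Vector(1, 2, 3))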


@@ -10,8 +10,6 @@ import scalaz.{Semigroup, \/}
import scalaz.std.tuple._
import scalaz.syntax.functor._
import scala.collection.Factory
private[daml] sealed abstract class ContractStreamStep[+D, +C] extends Product with Serializable {
import ContractStreamStep._
@@ -45,7 +43,7 @@ private[daml] sealed abstract class ContractStreamStep[+D, +C] extends Product with Serializable {
mapInserts(_ map f)
def partitionBimap[LD, DD, LC, CC, LDS](f: D => (LD \/ DD), g: C => (LC \/ CC))(implicit
LDS: Factory[LD, LDS]
LDS: collection.Factory[LD, LDS]
): (LDS, Inserts[LC], ContractStreamStep[DD, CC]) =
this match {
case Acs(inserts) =>


@@ -10,7 +10,6 @@ import com.daml.ledger.api.v1.{event => evv1}
import scalaz.{Monoid, \/, \/-}
import scalaz.syntax.tag._
import scala.collection.Factory
import scala.runtime.AbstractFunction1
private[daml] final case class InsertDeleteStep[+D, +C](
@@ -40,13 +39,13 @@ private[daml] final case class InsertDeleteStep[+D, +C](
def partitionMapPreservingIds[LC, CC](
f: C => (LC \/ CC)
): (Inserts[LC], InsertDeleteStep[D, CC]) = {
val (_, lcs, step) = partitionBimap(\/-(_), f)(implicitly[Factory[Unit, List[Unit]]])
val (_, lcs, step) = partitionBimap(\/-(_), f)(List)
(lcs, step)
}
/** Results undefined if cid(cc) != cid(c) */
def partitionBimap[LD, DD, LC, CC, LDS](f: D => (LD \/ DD), g: C => (LC \/ CC))(implicit
LDS: Factory[LD, LDS]
LDS: collection.Factory[LD, LDS]
): (LDS, Inserts[LC], InsertDeleteStep[DD, CC]) = {
import scalaz.std.tuple._, scalaz.std.either._, scalaz.syntax.traverse._
val (lcs, ins) = inserts partitionMap (x => g(x).toEither)
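The bare companion object passed above (List here, Seq later in this commit) satisfies a Factory parameter because the standard library converts an IterableFactory to a Factory implicitly; a small sketch with made-up names:

// Sketch only: an explicit companion object can be passed wherever a
// collection.Factory argument is expected.
def drain[A, C](xs: Iterator[A])(factory: collection.Factory[A, C]): C =
  factory.fromSpecific(xs)

val asList: List[Int] = drain(Iterator(1, 2, 3))(List)
val asSet: Set[Int] = drain(Iterator(1, 2, 3))(Set)
// asList == List(1, 2, 3); asSet == Set(1, 2, 3)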


@@ -48,7 +48,6 @@ import com.daml.logging.{ContextualizedLogger, LoggingContextOf}
import com.daml.metrics.Metrics
import spray.json.{JsArray, JsObject, JsValue, JsonReader, JsonWriter, enrichAny => `sj enrichAny`}
import scala.collection.Factory
import scala.collection.mutable.HashSet
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
@@ -953,7 +952,7 @@ class WebSocketService(
.fromLedgerApi(ce)
.liftErr(ServerError)
.flatMap(_.traverse(apiValueToLfValue).liftErr(ServerError)),
)(implicitly[Factory[ServerError, Seq[ServerError]]])
)(Seq)
StepAndErrors(
errors ++ aerrors,
dstep mapInserts { inserts: Vector[domain.ActiveContract[LfV]] =>


@@ -9,8 +9,7 @@ import com.daml.lf.data.Ref
import com.daml.lf.data.Time.Timestamp
import com.daml.logging.{ContextualizedLogger, LoggingContext}
import scala.collection.Factory
import scala.collection.mutable
import scala.collection.{Factory, mutable}
/** Commit context provides access to state inputs, commit parameters (e.g. record time) and
* allows committer to set state outputs.


@@ -7,7 +7,6 @@ import com.daml.ledger.participant.state.kvutils.store.{DamlStateKey, DamlStateValue}
import com.daml.ledger.participant.state.kvutils.{Envelope, Raw}
import scala.collection.SortedMap
import scala.collection.Factory
final class StateSerializationStrategy(keyStrategy: StateKeySerializationStrategy) {
def serializeState(key: DamlStateKey, value: DamlStateValue): Raw.StateEntry =
@@ -16,8 +15,5 @@ final class StateSerializationStrategy(keyStrategy: StateKeySerializationStrategy) {
def serializeStateUpdates(
state: Map[DamlStateKey, DamlStateValue]
): SortedMap[Raw.StateKey, Raw.Envelope] =
implicitly[Factory[Raw.StateEntry, SortedMap[Raw.StateKey, Raw.Envelope]]]
.fromSpecific(state.view.map { case (key, value) =>
serializeState(key, value)
})
state.view.map { case (key, value) => serializeState(key, value) }.to(SortedMap)
}
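For context: mapping over the view and finishing with .to(SortedMap) builds the sorted map directly, without materializing an intermediate collection; it does require an Ordering on the resulting key type. An illustrative sketch with plain String keys:

import scala.collection.SortedMap

// Sketch only: the lazy view is mapped and then drained straight into the
// sorted result.
val state = Map(1 -> "one", 2 -> "two")
val byName: SortedMap[String, Int] =
  state.view.map { case (k, v) => (v, k) }.to(SortedMap)
// SortedMap(one -> 1, two -> 2)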


@@ -3,8 +3,7 @@
package com.daml.resources
import scala.collection.Factory
import scala.collection.mutable
import scala.collection.{Factory, mutable}
private[resources] final class UnitCanBuildFrom[T, C[_]] extends Factory[T, Unit] {
override def fromSpecific(it: IterableOnce[T]) = ()


@@ -5,7 +5,6 @@ package com.daml.resources
import com.daml.resources.HasExecutionContext.executionContext
import scala.collection.Factory
import scala.concurrent.Future
import scala.util.Try
@@ -56,7 +55,7 @@ final class ResourceFactories[Context: HasExecutionContext] {
* @return A [[Resource]] with a sequence of the values of the sequenced [[Resource]]s as its underlying value.
*/
def sequence[T, C[X] <: Iterable[X], U](seq: C[R[T]])(implicit
bf: Factory[T, U],
bf: collection.Factory[T, U],
context: Context,
): R[U] = new R[U] {
private val resource = seq


@@ -15,7 +15,6 @@ import scala.annotation.tailrec
import com.daml.scalautil.Statement.discard
import scala.collection.Factory
import scala.collection.immutable.{IndexedSeq, Iterable, LinearSeq}
/** A variant of [[scalaz.CorecursiveList]] that emits a final state
@@ -54,7 +53,7 @@ private[trigger] sealed abstract class UnfoldState[T, A] {
}
}
final def runTo[FA](implicit factory: Factory[A, FA]): (FA, T) = {
final def runTo[FA](implicit factory: collection.Factory[A, FA]): (FA, T) = {
val b = factory.newBuilder
val t = foreach(a => discard(b += a))
(b.result(), t)
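For what it is worth, the implicit collection.Factory parameter lets callers of a method shaped like runTo choose the target collection purely through the type argument. A sketch of the same pattern with made-up names:

// Sketch only: the caller picks the result collection via the type parameter,
// and the matching Factory instance is summoned implicitly.
def drainWithCount[A, FA](it: Iterator[A])(implicit factory: collection.Factory[A, FA]): (FA, Int) = {
  val b = factory.newBuilder
  var n = 0
  it.foreach { a => b += a; n += 1 }
  (b.result(), n)
}

val (asVector, count) = drainWithCount[Int, Vector[Int]](Iterator(1, 2, 3))
// asVector == Vector(1, 2, 3), count == 3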