Add files via upload
This commit is contained in:
parent
7f9068eadf
commit
1610847e43
8
2019/Scala/day06/Counter.scala
Normal file
8
2019/Scala/day06/Counter.scala
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
package day06
|
||||||
|
|
||||||
|
/** Minimal mutable accumulator used to sum orbit depths. */
class Counter
{
  // Running total; the only mutable state in this class.
  private[this] var total: Int = 0

  /** Current accumulated value. */
  def count: Int = total

  /** Adds `n` to the running total. */
  def inc(n: Int): Unit = total = total + n
}
|
45
2019/Scala/day06/Day06.scala
Normal file
45
2019/Scala/day06/Day06.scala
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
package day06
|
||||||
|
|
||||||
|
import scala.collection.mutable
|
||||||
|
|
||||||
|
/**
 * Advent of Code 2019, Day 6: builds the orbit tree from lines of the
 * form "A)B" (B orbits A) and answers both puzzle parts.
 *
 * @param input raw puzzle lines, one orbit pair per line
 */
class Day06(input: List[String])
{
  /** Easy access to all nodes, keyed by name. */
  val nodes = new mutable.HashMap[String, Node]

  /** List of (parent, child) name pairs parsed from the input. */
  val tuples: List[(String, String)] = input.map(string => {
    val split = string.split(')')
    (split(0), split(1))
  })

  // Build the node map and wire parent/child links in a single pass.
  // getOrElseUpdate replaces the previous contains + addOne(a, b) pattern,
  // which relied on deprecated auto-tupling of argument lists.
  tuples.foreach { case (a, b) =>
    val parentNode = nodes.getOrElseUpdate(a, new Node(a))
    val childNode = nodes.getOrElseUpdate(b, new Node(b))
    parentNode.children += childNode
    childNode.parent = parentNode
  }

  /** Part 1: total number of direct and indirect orbits (sum of all node depths from "COM"). */
  def solveP1(): Int =
  {
    val start: Node = nodes("COM")
    val counter = new Counter
    start.countDepths(0, counter)
    counter.count
  }

  /** Part 2: minimum orbital transfers between the objects YOU and SAN orbit. */
  def solveP2(): Int =
  {
    val start = nodes("YOU")
    val end = nodes("SAN")
    val youParents = start.getParents
    // First ancestor shared by both chains; distance is the sum of hops to it.
    val mergePoint = end.findFirstCommon(youParents)
    start.findDistanceToParent(mergePoint) + end.findDistanceToParent(mergePoint)
  }
}
|
18
2019/Scala/day06/Main.scala
Normal file
18
2019/Scala/day06/Main.scala
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
package day06
|
||||||
|
|
||||||
|
import kamlib.{Reader, Wrapper}
|
||||||
|
|
||||||
|
object Main {

  /** Entry point: reads the puzzle input, builds the orbit map (timed), prints both answers. */
  def main(args: Array[String]): Unit = {
    val input: List[String] = Reader.readList("/input6.txt")
    // Wrapper times the construction and hands back (result, elapsed millis).
    val (solution, initMillis) = Wrapper(new Day06(input)).tuple
    println(s"Time initializing data structures: ${initMillis}ms")

    println("Part 1:")
    Wrapper(solution.solveP1()).print()
    println("Part 2:")
    Wrapper(solution.solveP2()).print()
  }
}
|
43
2019/Scala/day06/Tree.scala
Normal file
43
2019/Scala/day06/Tree.scala
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
package day06
|
||||||
|
|
||||||
|
import scala.collection.mutable
|
||||||
|
|
||||||
|
/** Orbit tree: every node is either the sentinel [[Leaf]] (no parent) or a named [[Node]]. */
sealed trait Tree
case object Leaf extends Tree
case class Node(name: String, var parent: Tree = Leaf) extends Tree
{
  /** Nodes that directly orbit this one. */
  val children = new mutable.ArrayBuffer[Node]

  /** Adds this node's depth to the counter, then descends into every child. */
  def countDepths(depth: Int, counter: Counter): Unit =
  {
    counter.inc(depth)
    for (child <- children) child.countDepths(depth + 1, counter)
  }

  /** All ancestors of this node, nearest first; empty at the root. */
  def getParents: List[Node] =
    parent match
    {
      case p: Node => p :: p.getParents
      case Leaf => List()
    }

  /** Walks up the ancestor chain until a node contained in `nodes` is found. */
  def findFirstCommon(nodes: List[Node]): Node =
    parent match
    {
      case p: Node => if (nodes.contains(p)) p else p.findFirstCommon(nodes)
      case Leaf => throw new Exception("Something went wrong")
    }

  /** Number of hops from this node up to the ancestor `target`. */
  def findDistanceToParent(target: Node, acc: Int = 0): Int =
    parent match
    {
      case p: Node => if (p == target) acc else p.findDistanceToParent(target, acc + 1)
      case Leaf => throw new Exception("Something went wrong")
    }
}
|
Loading…
Reference in a new issue