- You deserve the praise of this Parliament for the work you have done in the budgetary areas and especially in some of the areas where you have tried to change things within the Commission.
- Ni förtjänar att lovordas av detta parlament för det arbete ni har utfört på budgetområdet och speciellt inom några av de områden där ni har försökt ändra saker och ting inom kommissionen.
- Finally, I wish to thank this House and in particular the rapporteurs, Mr Hatzidakis, Mr Ortuondo Larrea, Ms McKenna and Mr Watts, for their understanding of the importance of the first 'Erika' package which is evidenced by the excellent work already done in such a limited time, and for the generally good cooperation between Parliament and the Commission in the field of maritime safety.
- Tills sist vill jag tacka denna kammare och i synnerhet föredragandena, herr Hatzidakis, herr Ortuondo Larrea, fru McKenna och herr Watts, för deras förståelse för hur viktig det första Erika-paketet var, vilket klart har visats genom det utmärkta arbete som redan utförts på en så kort tid och för det generellt goda samarbetet mellan Europaparlamentet och kommissionen på området sjöfartssäkerhet.
The query used to retrieve these sentence pairs (PostgreSQL):
SET search_path TO f9miniensv;
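-- Find sentence pairs in which a source-side 'prep' dependency (VERB head +
-- ADP dependent, both with fixed lemma ids) is word-aligned to a matching
-- VERB + ADP dependency on the target side, then rank the matches and render
-- each sentence as HTML with the matched tokens highlighted.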
WITH
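-- list: candidate matches. r1 is a 'prep' relation whose head (t11) is a VERB
-- and whose dependent (t12) is an ADP, both with fixed lemma ids; a1/a2 map
-- its head and dependent through the word alignment (restricted to
-- wsource < wtarget) to the target side, where r2 must link the aligned head
-- (t21, VERB) to the aligned dependent (t22, ADP), again with fixed lemmas.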
list AS (SELECT
t11.token_id AS t11,
t12.token_id AS t12,
t21.token_id AS t21,
t22.token_id AS t22,
r1.dep_id AS dep1,
r2.dep_id AS dep2
FROM
deprel r1
JOIN depstr s1 ON s1.dep_id = r1.dep_id
JOIN word_align a1 ON a1.wsource = r1.head AND a1.wsource < a1.wtarget
JOIN word_align a2 ON a2.wsource = r1.dependent
JOIN deprel r2 ON r2.head = a1.wtarget AND r2.dependent = a2.wtarget
JOIN depstr s2 ON s2.dep_id = r2.dep_id
JOIN token t11 ON t11.token_id = r1.head
JOIN token t21 ON t21.token_id = r2.head
JOIN token t12 ON t12.token_id = r1.dependent
JOIN token t22 ON t22.token_id = r2.dependent
WHERE
s1.val = 'prep' AND
t11.ctag = 'VERB' AND
t21.ctag = 'VERB' AND
t12.ctag = 'ADP' AND
t22.ctag = 'ADP' AND
t11.lemma_id = 27461 AND
t12.lemma_id = 8748 AND
t21.lemma_id = 64901 AND
t22.lemma_id = 40192),
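-- stats: per-sentence counts for every sentence containing a matched head
-- token: c = distinct tokens in the sentence, c_aligned = rows after joining
-- the alignment links, c_target = distinct aligned target tokens.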
stats AS (SELECT
sentence_id,
count(DISTINCT token_id) AS c,
count(*) AS c_aligned,
count(DISTINCT wtarget) AS c_target
FROM
token
LEFT JOIN word_align ON wsource = token_id
WHERE
sentence_id IN (
SELECT sentence_id
FROM
list
JOIN token ON token_id IN (t11, t21)
)
GROUP BY sentence_id),
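-- numbered: give each match in list a running index i.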
numbered AS (SELECT row_number() OVER () AS i, *
FROM
list),
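-- sentences: two rows per match i (n = 1: source sentence, highlighting
-- t11/t12; n = 2: target sentence, highlighting t21/t22), joined with stats.
-- The weight w mixes a penalty for differing sentence lengths within a match
-- (logistic term, weight 0.2) with a preference for shorter sentences
-- (inverse log of the average length, weight 0.8).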
sentences AS (SELECT *, .2 * (1 / (1 + exp(max(c) OVER (PARTITION BY i) - min(c) OVER (PARTITION BY i)))) +
.8 * (1 / log(avg(c) OVER (PARTITION BY i))) AS w
FROM
(
SELECT i, 1 AS n, sentence_id, ARRAY[t11,t12] AS tokens
FROM
numbered
JOIN token ON token_id = t11
UNION SELECT i, 2 AS n, sentence_id, ARRAY[t21,t22] AS tokens
FROM
numbered
JOIN token ON token_id = t21
) x
JOIN stats USING (sentence_id)
ORDER BY i, n)
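-- Render each selected sentence as HTML: every token becomes a
-- <span class="token">, matched tokens get an extra "hl" class, and tokens
-- flagged with lpad are preceded by a space. Results are ordered by weight w
-- (descending), then by match index and side.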
SELECT
i,
n,
w,
c,
c_aligned,
c_target,
sentence_id,
string_agg(CASE WHEN lpad THEN ' ' ELSE '' END || '<span class="token' ||
CASE WHEN ARRAY[token_id] <@ tokens THEN ' hl' ELSE '' END || '">' || val || '</span>',
'' ORDER BY token_id ASC) AS s
FROM
sentences
JOIN token USING (sentence_id)
JOIN typestr USING (type_id)
GROUP BY i, n, w, c, c_aligned, c_target, sentence_id
ORDER BY w DESC, i, n;
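The lemma constraints above are hard-coded numeric ids. As a minimal sketch, assuming the schema also stores lemma strings in a table shaped like depstr and typestr (the table name lemmastr and its columns lemma_id, val are assumptions, not shown in the query above), the ids could be resolved to their surface forms like this:

-- Hypothetical lookup: resolve the hard-coded lemma ids to their string forms.
-- Assumes a table lemmastr(lemma_id, val) analogous to depstr/typestr.
SET search_path TO f9miniensv;

SELECT lemma_id, val
FROM lemmastr
WHERE lemma_id IN (27461, 8748, 64901, 40192);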