- Mr President, Mr Billström, Commissioner Barrot, I agree with the importance and support the emphasis that both the Presidency and the Commission have placed on identifying this as one of the most influential dimensions of globalisation that needs a European response.
- Jag instämmer i den betydelse och stöder den betoning som både ordförandeskapet och kommissionen har fäst vid att identifiera detta som en av de mest inflytelserika dimensioner av globaliseringen som kräver ett europeiskt svar.
- That is why the Committee on Civil Liberties, Justice and Home Affairs has supported the strengthening of Frontex and also the increase of its budget, which we hope will be supported in this House, because we support the emphasis and importance placed on this matter by the Swedish Presidency.
- Det är anledningen till att utskottet för medborgerliga fri- och rättigheter samt rättsliga och inrikes frågor stöder en förstärkning av Frontex och också en ökning av dess budget som vi hoppas kommer att få stöd i Europaparlamentet, för vi stöder den betoning och betydelse som det svenska ordförandeskapet har fäst vid denna fråga.
The query used to retrieve the sentence pairs above:
SET search_path TO f9miniensv;
WITH
-- list: candidate hits. r1 is a source-side 'prep' relation whose head (t11) and
-- dependent (t12) are word-aligned to the head (t21) and dependent (t22) of a
-- target-side relation r2. Both heads must be tagged VERB and both dependents ADP,
-- each restricted to a fixed lemma_id.
list AS (SELECT
t11.token_id AS t11,
t12.token_id AS t12,
t21.token_id AS t21,
t22.token_id AS t22,
r1.dep_id AS dep1,
r2.dep_id AS dep2
FROM
deprel r1
JOIN depstr s1 ON s1.dep_id = r1.dep_id
JOIN word_align a1 ON a1.wsource = r1.head AND a1.wsource < a1.wtarget
JOIN word_align a2 ON a2.wsource = r1.dependent
JOIN deprel r2 ON r2.head = a1.wtarget AND r2.dependent = a2.wtarget
JOIN depstr s2 ON s2.dep_id = r2.dep_id
JOIN token t11 ON t11.token_id = r1.head
JOIN token t21 ON t21.token_id = r2.head
JOIN token t12 ON t12.token_id = r1.dependent
JOIN token t22 ON t22.token_id = r2.dependent
WHERE
s1.val = 'prep' AND
t11.ctag = 'VERB' AND
t21.ctag = 'VERB' AND
t12.ctag = 'ADP' AND
t22.ctag = 'ADP' AND
t11.lemma_id = 17987 AND
t12.lemma_id = 11075 AND
t21.lemma_id = 3133 AND
t22.lemma_id = 50540),
-- stats: per-sentence counts for every sentence containing a matched head token:
-- c = tokens in the sentence, c_aligned = joined token/alignment rows,
-- c_target = distinct aligned target tokens.
stats AS (SELECT
sentence_id,
count(DISTINCT token_id) AS c,
count(*) AS c_aligned,
count(DISTINCT wtarget) AS c_target
FROM
token
LEFT JOIN word_align ON wsource = token_id
WHERE
sentence_id IN (
SELECT sentence_id
FROM
list
JOIN token ON token_id IN (t11, t21)
)
GROUP BY sentence_id),
-- numbered: assign each hit a sequential index i.
numbered AS (SELECT row_number() OVER () AS i, *
FROM
list),
-- sentences: two rows per hit (n = 1 for the r1 side, n = 2 for the r2 side), each
-- carrying the hit's highlighted tokens and a ranking weight w based on the length
-- difference and the average length of the two sentences.
sentences AS (SELECT *, .2 * (1 / (1 + exp(max(c) OVER (PARTITION BY i) - min(c) OVER (PARTITION BY i)))) +
.8 * (1 / log(avg(c) OVER (PARTITION BY i))) AS w
FROM
(
SELECT i, 1 AS n, sentence_id, ARRAY[t11,t12] AS tokens
FROM
numbered
JOIN token ON token_id = t11
UNION SELECT i, 2 AS n, sentence_id, ARRAY[t21,t22] AS tokens
FROM
numbered
JOIN token ON token_id = t21
) x
JOIN stats USING (sentence_id)
ORDER BY i, n)
-- Final result: each sentence rendered as HTML <span class="token"> elements, with the
-- matched tokens additionally given the "hl" class; a leading space is inserted before
-- tokens flagged with lpad. Rows are ordered by descending weight w.
SELECT
i,
n,
w,
c,
c_aligned,
c_target,
sentence_id,
string_agg(CASE WHEN lpad THEN ' ' ELSE '' END || '<span class="token' ||
CASE WHEN ARRAY[token_id] <@ tokens THEN ' hl' ELSE '' END || '">' || val || '</span>',
'' ORDER BY token_id ASC) AS s
FROM
sentences
JOIN token USING (sentence_id)
JOIN typestr USING (type_id)
GROUP BY i, n, w, c, c_aligned, c_target, sentence_id
ORDER BY w DESC, i, n;
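Written out, the ranking weight computed in the sentences CTE is (in LaTeX notation)

w = 0.2 \cdot \frac{1}{1 + e^{\,c_{\max} - c_{\min}}} + 0.8 \cdot \frac{1}{\log_{10} c_{\mathrm{avg}}}

where c_max, c_min and c_avg are the maximum, minimum and mean of the per-sentence token count c over the two rows that share the same hit index i (PostgreSQL's single-argument log() is the base-10 logarithm). The first term favours hits whose two sentences are of similar length; the second favours short sentences overall.

The four lemma_id constants in the list CTE (17987, 11075, 3133, 50540) pin the match to one specific verb and preposition lemma on each side. If the schema also contains a lemma string table analogous to depstr and typestr (assumed here under the hypothetical name lemmastr(lemma_id, val)), the IDs could be resolved with a lookup along these lines:

-- Hypothetical lookup: lemmastr(lemma_id, val) is an assumed analogue of depstr/typestr,
-- not confirmed by the query above.
SELECT lemma_id, val
FROM lemmastr
WHERE lemma_id IN (17987, 11075, 3133, 50540);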